[ 523.446390] env[68569]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=68569) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 523.446733] env[68569]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=68569) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 523.446770] env[68569]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=68569) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 523.447094] env[68569]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 523.542570] env[68569]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68569) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 523.552557] env[68569]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68569) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 523.596610] env[68569]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 524.156543] env[68569]: INFO nova.virt.driver [None req-015eb37a-04f6-4f65-be06-0c3069e15dbb None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 524.227437] env[68569]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 524.227661] env[68569]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 524.227733] env[68569]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68569) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 527.138973] env[68569]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-056ce4f1-e504-4083-9157-b7ecc2b14f17 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.155758] env[68569]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68569) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 527.155896] env[68569]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-6863ab00-f8b6-46bd-99ec-2086d974c75c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.182823] env[68569]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 55b96.
[ 527.182946] env[68569]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.955s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 527.183594] env[68569]: INFO nova.virt.vmwareapi.driver [None req-015eb37a-04f6-4f65-be06-0c3069e15dbb None None] VMware vCenter version: 7.0.3
[ 527.187323] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f45bf13-4f72-452c-881f-656f0c814243 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.209365] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f508a1-efcd-418e-8076-542cec0ff3ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.215641] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba0ad63-2399-481c-ac39-40c87ed7b98b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.222353] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4288d6-30de-4d29-b7c2-50698d924378 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.235361] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecb3397-50fb-4959-a13c-91ec6622c501 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.241314] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c60eee-a446-4803-ac87-d82da666da02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.272276] env[68569]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-0e2a3e9e-8b69-49a7-8782-63bec9d7b667 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 527.277431] env[68569]: DEBUG nova.virt.vmwareapi.driver [None req-015eb37a-04f6-4f65-be06-0c3069e15dbb None None] Extension org.openstack.compute already exists. {{(pid=68569) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 527.280053] env[68569]: INFO nova.compute.provider_config [None req-015eb37a-04f6-4f65-be06-0c3069e15dbb None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 527.783250] env[68569]: DEBUG nova.context [None req-015eb37a-04f6-4f65-be06-0c3069e15dbb None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),f29df747-5aaf-439d-a313-cc552e2a0fa4(cell1) {{(pid=68569) load_cells /opt/stack/nova/nova/context.py:464}}
[ 527.785306] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 527.785554] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 527.786264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 527.786681] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Acquiring lock "f29df747-5aaf-439d-a313-cc552e2a0fa4" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 527.786868] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Lock "f29df747-5aaf-439d-a313-cc552e2a0fa4" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 527.787944] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Lock "f29df747-5aaf-439d-a313-cc552e2a0fa4" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 527.807787] env[68569]: INFO dbcounter [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Registered counter for database nova_cell0
[ 527.815952] env[68569]: INFO dbcounter [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Registered counter for database nova_cell1
[ 528.249726] env[68569]: DEBUG oslo_db.sqlalchemy.engines [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68569) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 528.250429] env[68569]: DEBUG oslo_db.sqlalchemy.engines [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68569) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 528.255078] env[68569]: ERROR nova.db.main.api [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 528.255078] env[68569]: result = function(*args, **kwargs)
[ 528.255078] env[68569]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 528.255078] env[68569]: return func(*args, **kwargs)
[ 528.255078] env[68569]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 528.255078] env[68569]: result = fn(*args, **kwargs)
[ 528.255078] env[68569]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 528.255078] env[68569]: return f(*args, **kwargs)
[ 528.255078] env[68569]: File "/opt/stack/nova/nova/objects/service.py", line 566, in _db_service_get_minimum_version
[ 528.255078] env[68569]: return db.service_get_minimum_version(context, binaries)
[ 528.255078] env[68569]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 528.255078] env[68569]: _check_db_access()
[ 528.255078] env[68569]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 528.255078] env[68569]: stacktrace = ''.join(traceback.format_stack())
[ 528.255078] env[68569]:
[ 528.255951] env[68569]: ERROR nova.db.main.api [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 528.255951] env[68569]: result = function(*args, **kwargs)
[ 528.255951] env[68569]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 528.255951] env[68569]: return func(*args, **kwargs)
[ 528.255951] env[68569]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 528.255951] env[68569]: result = fn(*args, **kwargs)
[ 528.255951] env[68569]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 528.255951] env[68569]: return f(*args, **kwargs)
[ 528.255951] env[68569]: File "/opt/stack/nova/nova/objects/service.py", line 566, in _db_service_get_minimum_version
[ 528.255951] env[68569]: return db.service_get_minimum_version(context, binaries)
[ 528.255951] env[68569]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 528.255951] env[68569]: _check_db_access()
[ 528.255951] env[68569]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 528.255951] env[68569]: stacktrace = ''.join(traceback.format_stack())
[ 528.255951] env[68569]:
[ 528.256304] env[68569]: WARNING nova.objects.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Failed to get minimum service version for cell f29df747-5aaf-439d-a313-cc552e2a0fa4
[ 528.256414] env[68569]: WARNING nova.objects.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 528.256846] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Acquiring lock "singleton_lock" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 528.257010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Acquired lock "singleton_lock" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
528.257260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Releasing lock "singleton_lock" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 528.257613] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Full set of CONF: {{(pid=68569) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 528.257762] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ******************************************************************************** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 528.257890] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] Configuration options gathered from: {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 528.258038] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 528.258234] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 528.258362] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ================================================================================ {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 528.258571] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] allow_resize_to_same_host = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.258741] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] arq_binding_timeout = 300 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.258871] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] backdoor_port = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.258997] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] backdoor_socket = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.259176] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] block_device_allocate_retries = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.259336] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] block_device_allocate_retries_interval = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.259503] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cert = self.pem {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.259669] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.259835] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute_monitors = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.259999] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] config_dir = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.260398] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] config_drive_format = iso9660 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.260545] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.260718] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] config_source = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.260889] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] console_host = devstack {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.261067] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] control_exchange = nova {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.261231] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cpu_allocation_ratio = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.261391] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] daemon = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.261557] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] debug = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.261713] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] default_access_ip_network_name = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.261876] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] default_availability_zone = nova {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.262044] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] default_ephemeral_format = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.262211] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] default_green_pool_size = 1000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.262490] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.262605] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] default_schedule_zone = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.262752] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] disk_allocation_ratio = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.262909] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] enable_new_services = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.263096] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] enabled_apis = ['osapi_compute'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.263260] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] enabled_ssl_apis = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.263419] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] flat_injected = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.263575] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] force_config_drive = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.263731] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] force_raw_images = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.263896] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] graceful_shutdown_timeout = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.264069] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] heal_instance_info_cache_interval = -1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.264289] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] host = cpu-1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.264462] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.264623] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.264783] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.264989] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.265170] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_build_timeout = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.265328] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_delete_interval = 300 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.265515] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_format = [instance: %(uuid)s] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.265683] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_name_template = instance-%08x {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.265842] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_usage_audit = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266021] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_usage_audit_period = month {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266190] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266351] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266513] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] internal_service_availability_zone = internal {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266668] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] key = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266823] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] live_migration_retry_count = 30 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.266988] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_color = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.267164] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_config_append = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.267326] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.267482] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_dir = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.267662] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_file = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.267792] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_options = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.267952] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_rotate_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.268133] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_rotate_interval_type = days {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.268298] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] log_rotation_type = none {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.268425] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.268550] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.268737] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.268903] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269038] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269201] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] long_rpc_timeout = 1800 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269357] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] max_concurrent_builds = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269511] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] max_concurrent_live_migrations = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269667] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] max_concurrent_snapshots = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269820] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] max_local_block_devices = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.269973] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] max_logfile_count = 30 {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.270143] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] max_logfile_size_mb = 200 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.270299] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] maximum_instance_delete_attempts = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.270461] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metadata_listen = 0.0.0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.270630] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metadata_listen_port = 8775 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.270794] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metadata_workers = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.270949] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] migrate_max_retries = -1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.271126] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] mkisofs_cmd = genisoimage {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.271327] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.271457] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] my_ip = 10.180.1.21 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.271655] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.271814] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] network_allocate_retries = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.271984] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.272161] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.272319] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] osapi_compute_listen_port = 8774 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.272480] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] osapi_compute_unique_server_name_scope = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.272649] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] osapi_compute_workers = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.272821] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] password_length = 12 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273009] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] periodic_enable = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273181] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] periodic_fuzzy_delay = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273348] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] pointer_model = usbtablet {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273510] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] preallocate_images = none {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273669] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] publish_errors = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273795] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] pybasedir = /opt/stack/nova {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.273944] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ram_allocation_ratio = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.274114] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] rate_limit_burst = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.274279] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] rate_limit_except_level = CRITICAL {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.274433] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] rate_limit_interval = 0 {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.274589] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reboot_timeout = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.274745] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reclaim_instance_interval = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.274895] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] record = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.275072] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reimage_timeout_per_gb = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.275238] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] report_interval = 120 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.275408] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] rescue_timeout = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.275585] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reserved_host_cpus = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.275745] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reserved_host_disk_mb = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.275902] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reserved_host_memory_mb = 512 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.276067] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] reserved_huge_pages = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.276227] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] resize_confirm_window = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.276382] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] resize_fs_using_block_device = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.276537] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] resume_guests_state_on_host_boot = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.276705] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.276864] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] rpc_response_timeout = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.277030] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] run_external_periodic_tasks = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.277199] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] running_deleted_instance_action = reap {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.277363] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.277570] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] running_deleted_instance_timeout = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.277738] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler_instance_sync_interval = 120 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.277906] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_down_time = 720 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.278084] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] servicegroup_driver = db {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.278242] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] shell_completion = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.278398] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] shelved_offload_time = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.278555] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] shelved_poll_interval = 3600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.278718] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] shutdown_timeout = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.278874] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] source_is_ipv6 = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.279039] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ssl_only = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.279287] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.279454] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] sync_power_state_interval = 600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.279613] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] sync_power_state_pool_size = 1000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.279779] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] syslog_log_facility = LOG_USER {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.279933] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] tempdir = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.280103] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] timeout_nbd = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.280273] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] transport_url = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.280431] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] update_resources_interval = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.280586] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] use_cow_images = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.280742] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] use_journal = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.280895] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] use_json = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.281068] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] use_rootwrap_daemon = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.281227] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] 
use_stderr = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.281383] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] use_syslog = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.281537] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vcpu_pin_set = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.281703] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plugging_is_fatal = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.281868] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plugging_timeout = 300 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.282044] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] virt_mkfs = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.282211] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] volume_usage_poll_interval = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.282371] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] watch_log_file = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.282538] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] web = /usr/share/spice-html5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 528.282726] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.282894] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.283070] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.283246] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_concurrency.disable_process_locking = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.283550] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.283744] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.283913] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.284096] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.284269] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.284441] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.284624] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.auth_strategy = keystone {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.284792] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.compute_link_prefix = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.284966] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.285151] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.dhcp_domain = novalocal {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.285318] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.enable_instance_password = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.285502] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.glance_link_prefix = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.285676] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.285847] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.286023] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.instance_list_per_project_cells = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.286183] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.list_records_by_skipping_down_cells = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.286346] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.local_metadata_per_cell = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.286511] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.max_limit = 1000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.286678] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.metadata_cache_expiration = 15 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.286848] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.neutron_default_tenant_id = default {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.287021] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.response_validation = warn {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.287199] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.use_neutron_default_nets = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.287365] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.287549] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.287731] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.287907] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.288086] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_dynamic_targets = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.288252] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_jsonfile_path = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.288429] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.288635] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.backend = dogpile.cache.memcached {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.288818] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.backend_argument = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.288982] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.backend_expiration_time = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.289167] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.config_prefix = cache.oslo {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.289339] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.dead_timeout = 60.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.289503] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.debug_cache_backend = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.289667] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.enable_retry_client = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.289826] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.enable_socket_keepalive = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.289994] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.enabled = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.290170] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.enforce_fips_mode = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.290333] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.expiration_time = 600 
{{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.290493] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.hashclient_retry_attempts = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.290679] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.290861] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_dead_retry = 300 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.291059] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_password = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.291234] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.291396] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.291558] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_pool_maxsize = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.291721] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.291880] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_sasl_enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.292066] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.292275] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.292392] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.memcache_username = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.292555] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.proxies = [] {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.292718] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_db = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.292869] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_password = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.293051] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.293230] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.293397] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_server = localhost:6379 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.293559] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_socket_timeout = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.293719] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.redis_username = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.293878] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.retry_attempts = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.294053] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.retry_delay = 0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.294220] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.socket_keepalive_count = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.294379] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.socket_keepalive_idle = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.294537] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.socket_keepalive_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.294694] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.tls_allowed_ciphers = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.294848] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.tls_cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.295011] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.tls_certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.295171] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.tls_enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.295325] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cache.tls_keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.295512] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.295693] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.auth_type = password {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.295854] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296037] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296201] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296364] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296523] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.cross_az_attach = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296683] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.debug = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296838] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.endpoint_template = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.296996] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.http_retries = 3 {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.297169] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.297323] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.297492] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.os_region_name = RegionOne {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.297668] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.297829] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cinder.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.297999] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.298172] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.cpu_dedicated_set = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.298328] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.cpu_shared_set = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.298490] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.image_type_exclude_list = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.298677] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.298852] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.299020] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.299188] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.299356] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.299518] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.resource_provider_association_refresh = 300 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.299677] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.299835] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.shutdown_retry_interval = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.300020] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.300203] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] conductor.workers = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.300379] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] console.allowed_origins = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.300537] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] console.ssl_ciphers = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.300707] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] console.ssl_minimum_version = default {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.300874] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] consoleauth.enforce_session_timeout = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.301053] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] consoleauth.token_ttl = 600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.301227] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.301385] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.certfile = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.301546] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.301703] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.connect_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.301858] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.302023] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.302211] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.302368] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.302555] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.302728] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.302943] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.303072] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.303228] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.303394] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.service_type = accelerator {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.303552] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.303708] env[68569]: DEBUG oslo_service.backend.eventlet.service 
[None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.303862] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.304020] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.304201] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.304357] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] cyborg.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.304523] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.asyncio_connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.304698] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.asyncio_slave_connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.304869] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.backend = sqlalchemy {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.305057] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.305228] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.connection_debug = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.305407] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.connection_parameters = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.305679] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.connection_recycle_time = 3600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.305876] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.connection_trace = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.306076] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.db_inc_retry_interval = 
True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.306248] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.db_max_retries = 20 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.306426] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.db_max_retry_interval = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.306589] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.db_retry_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.306765] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.max_overflow = 50 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.306931] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.max_pool_size = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.307108] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.max_retries = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.307281] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.307440] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.mysql_wsrep_sync_wait = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.307615] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.pool_timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.307788] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.retry_interval = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.307943] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.slave_connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.308118] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.sqlite_synchronous = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.308280] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] database.use_db_reconnect = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
528.308446] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.asyncio_connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.308604] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.asyncio_slave_connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.308775] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.backend = sqlalchemy {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.308940] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.309114] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.connection_debug = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.309284] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.connection_parameters = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.309444] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.connection_recycle_time = 3600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.309606] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.connection_trace = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.309766] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.db_inc_retry_interval = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.309926] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.db_max_retries = 20 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.310122] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.db_max_retry_interval = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.310290] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.db_retry_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.310450] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.max_overflow = 50 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.310610] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.max_pool_size = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.310771] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.max_retries = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.310938] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.311108] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.311265] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.pool_timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.311424] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.retry_interval = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.311579] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.slave_connection = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.311742] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] api_database.sqlite_synchronous = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.311914] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] devices.enabled_mdev_types = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.312102] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.312276] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.312439] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ephemeral_storage_encryption.enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.312599] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.312773] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.api_servers = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.312934] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.313098] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.313266] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.313421] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.connect_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.313576] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.313737] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.debug = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.313900] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.default_trusted_certificate_ids = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.314070] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.enable_certificate_validation = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.314258] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.enable_rbd_download = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.314516] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.314792] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.315072] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.315339] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.max_version = None {{(pid=68569) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.315577] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.315764] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.num_retries = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.315940] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.rbd_ceph_conf = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.316122] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.rbd_connect_timeout = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.316296] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.rbd_pool = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.316465] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.rbd_user = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.316635] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.316800] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.316956] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.317140] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.service_type = image {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.317305] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.317462] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.317652] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.317820] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.318012] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.318188] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.verify_glance_signatures = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.318349] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] glance.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.318533] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] guestfs.debug = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.318719] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.318885] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.auth_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.319057] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.319222] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.319389] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.319547] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.connect_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.319707] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.319863] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320037] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.insecure = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320199] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320358] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320513] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320670] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320823] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.320976] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.321160] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.service_type = shared-file-system {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.321325] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.share_apply_policy_timeout = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.321488] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.321647] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.321802] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.321958] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.322153] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.322315] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] manila.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.322485] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] mks.enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.322866] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.323072] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] image_cache.manager_interval = 2400 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.323244] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] image_cache.precache_concurrency = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.323419] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] image_cache.remove_unused_base_images = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.323653] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.323858] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.324063] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] image_cache.subdirectory_name = _base {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.324249] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.api_max_retries = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.324416] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.api_retry_interval = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.324576] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.324742] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.auth_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.324899] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.325067] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.325235] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.325448] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.conductor_group = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.325632] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.connect_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.325797] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.325955] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.326134] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.326295] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.326452] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.326609] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.326779] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.peer_list = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.326937] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.327110] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.retriable_status_codes = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.327278] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.serial_console_state_timeout = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.327436] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.327641] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.service_type = baremetal {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.327812] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.shard = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.327979] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.328155] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.328317] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.328479] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.328688] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.328855] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ironic.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.329053] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.329232] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] key_manager.fixed_key = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.329415] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.329575] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.barbican_api_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.329738] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.barbican_endpoint = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.329936] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.barbican_endpoint_type = public {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.330122] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.barbican_region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.330287] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.330447] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.330609] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.330773] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.330931] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.331110] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.number_of_retries = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.331275] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.retry_delay = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.331440] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.send_service_user_token = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.331601] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.331761] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.331924] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.verify_ssl = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.332094] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican.verify_ssl_path = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.332264] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.332425] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.auth_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.332623] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.332808] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.332978] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.333158] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.333352] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.333478] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.333635] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] barbican_service_user.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.333807] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.approle_role_id = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.333966] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.approle_secret_id = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.334149] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.kv_mountpoint = secret {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.334311] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.kv_path = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.334473] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.kv_version = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.334632] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.namespace = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.334790] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.root_token_id = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.334949] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.ssl_ca_crt_file = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.335132] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.timeout = 60.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.335299] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.use_ssl = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.335479] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.335649] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.335813] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.335978] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.336150] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.connect_retries = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.336308] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.336467] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.336629] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.336790] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.336949] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.337118] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.337277] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.337437] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.337625] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.337802] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.service_type = identity {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.337965] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.338134] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.338290] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.338445] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.338647] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.338813] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] keystone.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.339016] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.ceph_mount_options = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.339441] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.339626] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.connection_uri = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.339793] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_mode = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.339983] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.340177] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_models = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.340352] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_power_governor_high = performance {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.340525] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.340697] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_power_management = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.340870] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.341049] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.device_detach_attempts = 8 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.341219] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.device_detach_timeout = 20 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.341385] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.disk_cachemodes = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.341543] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.disk_prefix = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.341706] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.enabled_perf_events = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.341884] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.file_backed_memory = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.342083] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.gid_maps = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.342249] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.hw_disk_discard = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.342406] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.hw_machine_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.342577] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_rbd_ceph_conf = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.342743] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.342907] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.343095] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_rbd_glance_store_name = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.343268] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.343451] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_type = default {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.343594] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.images_volume_group = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.343757] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.inject_key = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.343919] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.inject_partition = -2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.344092] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.inject_password = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.344256] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.iscsi_iface = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.344416] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.iser_use_multipath = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.344586] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.344782] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.344954] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_downtime = 500 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.345134] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.345301] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.345462] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_inbound_addr = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.345624] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.345786] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.345945] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_scheme = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.346132] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_timeout_action = abort {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.346296] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_tunnelled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.346454] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_uri = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.346617] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.live_migration_with_native_tls = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.346778] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.max_queues = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.346941] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.347184] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.347349] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.nfs_mount_options = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.347692] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.347863] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68569) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.348046] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.348213] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.348378] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.348570] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.num_pcie_ports = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.348746] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.348914] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.pmem_namespaces = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.349086] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.quobyte_client_cfg = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.349386] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.349563] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.349731] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.349916] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.350096] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rbd_secret_uuid = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.350257] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rbd_user = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.350420] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.350591] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.350751] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rescue_image_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.350908] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rescue_kernel_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.351080] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rescue_ramdisk_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.351252] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.351413] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.rx_queue_size = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.351581] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.smbfs_mount_options = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.351869] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.352064] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.snapshot_compression = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.352228] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.snapshot_image_format = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.352456] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.352614] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.sparse_logical_volumes = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.352778] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.swtpm_enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.352946] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.swtpm_group = tss {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.353127] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.swtpm_user = tss {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.353299] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.sysinfo_serial = unique {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.353456] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.tb_cache_size = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.353613] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.tx_queue_size = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.353783] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.uid_maps = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.353966] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.use_virtio_for_bridges = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.354162] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.virt_type = kvm {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.354335] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.volume_clear = zero {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.354502] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.volume_clear_size = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.354665] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.volume_enforce_multipath = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.354831] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.volume_use_multipath = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.354989] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_cache_path = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.355173] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.355340] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.355564] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.355675] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.355954] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.356150] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.vzstorage_mount_user = stack {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.356319] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.356493] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.356701] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.auth_type = password {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.356906] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.357088] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.357257] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.357416] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.connect_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.357602] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.357781] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.default_floating_pool = public {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.357941] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.358119] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.extension_sync_interval = 600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.358284] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.http_retries = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.358469] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.358608] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.358766] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.358971] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.359218] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.359411] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.ovs_bridge = br-int {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.359582] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.physnets = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.359756] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.region_name = RegionOne 
{{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.359941] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.360134] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.service_metadata_proxy = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.360297] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.360801] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.service_type = network {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.360801] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.360801] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.360997] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.361072] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.361257] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.361418] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] neutron.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.361591] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] notifications.bdms_in_notifications = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.361770] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] notifications.default_level = INFO {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.361934] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] notifications.include_share_mapping = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.362122] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] notifications.notification_format = unversioned {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.362286] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] notifications.notify_on_state_change = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.362459] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.362637] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] pci.alias = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.362808] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] pci.device_spec = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.362970] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] pci.report_in_placement = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.363160] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.363335] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.auth_type = password {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.363502] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.363661] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.363818] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.363979] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.364153] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.connect_retries = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.364309] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.364465] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.default_domain_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.364622] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.default_domain_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.364779] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.domain_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.364934] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.domain_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.365102] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.endpoint_override = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.365264] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.365422] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.365574] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.365729] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.365902] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.password = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.366091] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.project_domain_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.366266] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.project_domain_name = Default {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.366434] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.project_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.366605] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.project_name = service {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.366776] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.region_name = RegionOne {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.366945] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.367122] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.367295] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.service_type = placement {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.367459] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.367646] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.status_code_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.367813] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.367974] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.system_scope = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.368147] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.368305] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.trust_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.368466] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.user_domain_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.368630] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] 
placement.user_domain_name = Default {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.368791] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.user_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.368962] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.username = nova {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.369156] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.369319] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] placement.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.369501] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.cores = 20 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.369671] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.count_usage_from_placement = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.369857] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.370067] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.injected_file_content_bytes = 10240 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.370244] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.injected_file_path_length = 255 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.370410] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.injected_files = 5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.370575] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.instances = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.370741] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.key_pairs = 100 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.370906] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.metadata_items = 128 {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.371081] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.ram = 51200 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.371249] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.recheck_quota = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.371413] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.server_group_members = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.371577] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.server_groups = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.371786] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.371961] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] quota.unified_limits_resource_strategy = require {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.372152] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.372319] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.372481] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.image_metadata_prefilter = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.372643] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.372807] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.max_attempts = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.372988] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.max_placement_results = 1000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.373139] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.373302] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.373461] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.373632] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] scheduler.workers = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.373852] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.373973] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.374166] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.374334] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.374498] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.374660] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.374822] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.375027] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.375191] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.375355] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.375512] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.375673] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.375852] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.376041] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.376210] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.isolated_hosts = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.376372] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.isolated_images = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.376534] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.376695] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.376856] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.377025] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.pci_in_placement = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.377192] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.377353] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.377543] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.377716] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.377880] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.378078] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.378250] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.track_instance_changes = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.378427] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.378598] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metrics.required = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.378763] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metrics.weight_multiplier = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.378924] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.379097] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] metrics.weight_setting = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.379408] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.379580] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] serial_console.enabled = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.379756] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] serial_console.port_range = 10000:20000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.379925] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.380106] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.380276] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] serial_console.serialproxy_port = 6083 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.380441] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.380611] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.auth_type = password {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.380772] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.380928] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.381101] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.381261] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.381414] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.381585] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.send_service_user_token = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.381752] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.split_loggers = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.381910] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] service_user.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.382092] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.agent_enabled = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.382257] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.382561] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.382774] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.382948] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.html5proxy_port = 6082 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.383127] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.image_compression = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.383288] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.jpeg_compression = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.383448] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.playback_compression = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.383608] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.require_secure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.383777] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.server_listen = 127.0.0.1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.383946] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.384236] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.384408] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.streaming_mode = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.384568] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] spice.zlib_compression = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.384735] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] upgrade_levels.baseapi = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.384906] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] upgrade_levels.compute = auto {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.385094] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] upgrade_levels.conductor = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.385260] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] upgrade_levels.scheduler = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.385425] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.385584] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.385740] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.385892] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.386062] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.386222] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.386377] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.386537] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.386693] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vendordata_dynamic_auth.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.386864] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.api_retry_count = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.387034] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.ca_file = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.387208] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.387375] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.cluster_name = testcl1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.387570] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.connection_pool_size = 10 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.387740] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.console_delay_seconds = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.387910] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.datastore_regex = ^datastore.* {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.388135] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.388310] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.host_password = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.388497] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.host_port = 443 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.388680] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.host_username = administrator@vsphere.local {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.388851] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.insecure = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389019] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.integration_bridge = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389186] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.maximum_objects = 100 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389415] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.pbm_default_policy = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389495] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.pbm_enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389648] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.pbm_wsdl_location = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389827] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.389997] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.serial_port_proxy_uri = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.390202] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.serial_port_service_uri = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.390377] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.task_poll_interval = 0.5 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.390550] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.use_linked_clone = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.390721] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.vnc_keymap = en-us {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.390883] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.vnc_port = 5900 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.391071] env[68569]: DEBUG 
oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vmware.vnc_port_total = 10000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.391266] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.auth_schemes = ['none'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.391442] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.391738] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.391924] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.392108] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.novncproxy_port = 6080 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.392303] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.server_listen = 127.0.0.1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.392482] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.392644] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.vencrypt_ca_certs = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.392800] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.vencrypt_client_cert = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.392956] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vnc.vencrypt_client_key = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.393163] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.393337] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.disable_deep_image_inspection = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.393497] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.393661] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.393819] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.393976] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.disable_rootwrap = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.394185] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.enable_numa_live_migration = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.394356] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.394520] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.394683] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.394843] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.libvirt_disable_apic = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.395013] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.395182] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.395489] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.395664] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.395830] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.395992] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.396169] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.396331] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.396491] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.396655] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.396838] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.397012] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.client_socket_timeout = 900 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.397185] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.default_pool_size = 1000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.397351] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.keep_alive = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.397533] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.max_header_line = 16384 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.397707] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.397868] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.ssl_ca_file = None 
{{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.398038] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.ssl_cert_file = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.398201] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.ssl_key_file = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.398365] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.tcp_keepidle = 600 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.398568] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.398742] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] zvm.ca_file = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.398906] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] zvm.cloud_connector_url = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.399206] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.399381] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] zvm.reachable_timeout = 300 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.399552] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.399731] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.399906] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.connection_string = messaging:// {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.400086] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.enabled = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.400256] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] 
profiler.es_doc_type = notification {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.400417] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.es_scroll_size = 10000 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.400581] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.es_scroll_time = 2m {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.400744] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.filter_error_trace = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.400908] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.hmac_keys = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.401082] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.sentinel_service_name = mymaster {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.401249] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.socket_timeout = 0.1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.401409] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.trace_requests = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.401566] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler.trace_sqlalchemy = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.401748] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler_jaeger.process_tags = {} {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.401904] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler_jaeger.service_name_prefix = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.402075] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] profiler_otlp.service_name_prefix = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.402241] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] remote_debug.host = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.402396] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] remote_debug.port = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.402570] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.402734] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.402895] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.403071] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.403237] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.403397] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.403557] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.403719] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.403880] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.404060] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.404223] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.404393] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.404561] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.404727] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.404890] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.405070] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.405238] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.405402] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.405570] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.405734] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.405895] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.406070] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.406240] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.406402] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.406562] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68569) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.406723] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.406883] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.407048] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.407214] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.407374] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.407560] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.ssl = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.407740] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.407910] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.408086] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.408257] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.408423] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.408616] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.408814] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.408981] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_notifications.retry = -1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.409173] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.409345] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.409516] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.auth_section = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.409680] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.auth_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.409856] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.cafile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.410025] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.certfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.410193] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.collect_timing = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.410351] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.connect_retries = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.410541] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.connect_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.410659] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.endpoint_id = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.410832] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411030] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.endpoint_override = 
None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411149] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.endpoint_region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411306] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.endpoint_service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411461] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.endpoint_service_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411622] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.insecure = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411780] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.keyfile = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.411933] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.max_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.412119] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.min_version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.412278] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.region_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.412435] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.retriable_status_codes = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.412592] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.service_name = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.412747] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.service_type = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.412905] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.split_loggers = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.413071] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.status_code_retries = None {{(pid=68569) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.413231] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.status_code_retry_delay = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.413384] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.timeout = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.413538] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.valid_interfaces = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.413693] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_limit.version = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.413859] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_reports.file_event_handler = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.414033] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.414197] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] oslo_reports.log_dir = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.414367] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.414524] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.414682] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.414846] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415015] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415183] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415352] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415511] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_ovs_privileged.group = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415669] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415833] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.415994] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.416165] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] vif_plug_ovs_privileged.user = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.416336] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.416517] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.416693] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.416863] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.417042] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.417216] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.417380] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.417569] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.417753] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.417925] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.isolate_vif = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.418108] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.418275] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.418444] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.418640] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.418807] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] os_vif_ovs.per_port_bridge = False {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.418977] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] privsep_osbrick.capabilities = [21] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.419150] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] privsep_osbrick.group = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.419307] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] privsep_osbrick.helper_command = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.419470] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.419632] env[68569]: DEBUG oslo_service.backend.eventlet.service [None 
req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.419792] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] privsep_osbrick.user = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420037] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420215] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] nova_sys_admin.group = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420374] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] nova_sys_admin.helper_command = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420540] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420705] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420861] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] nova_sys_admin.user = None {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 528.420990] env[68569]: DEBUG oslo_service.backend.eventlet.service [None req-bb6a652d-0e56-4237-b1a9-f58f1590dcd2 None None] ******************************************************************************** {{(pid=68569) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 528.421401] env[68569]: INFO nova.service [-] Starting compute node (version 31.1.0) [ 528.925935] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Getting list of instances from cluster (obj){ [ 528.925935] env[68569]: value = "domain-c8" [ 528.925935] env[68569]: _type = "ClusterComputeResource" [ 528.925935] env[68569]: } {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 528.927024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ac884e-eda5-4d7d-be29-08609d832c62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.936021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Got total of 0 instances {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 528.936651] env[68569]: WARNING nova.virt.vmwareapi.driver [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 528.937180] env[68569]: INFO nova.virt.node [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Generated node identity a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 [ 528.937518] env[68569]: INFO nova.virt.node [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Wrote node identity a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 to /opt/stack/data/n-cpu-1/compute_id [ 529.441466] env[68569]: WARNING nova.compute.manager [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Compute nodes ['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 530.448326] env[68569]: INFO nova.compute.manager [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 531.454348] env[68569]: WARNING nova.compute.manager [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 531.454687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 531.454824] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 531.454943] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 531.455104] env[68569]: DEBUG nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 531.456077] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c259b2dd-807d-4f38-a895-485dbf9b2a08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.464676] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26af2f92-2ca3-4e32-95f2-ffa585577ab4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.479757] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584f68b8-4e3a-40f1-9be9-ee8de4af378b {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.486037] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480fc2f4-6302-49ae-8bec-8fb5f0d4bded {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.515353] env[68569]: DEBUG nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181022MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 531.515518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 531.515699] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 532.018865] env[68569]: WARNING nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] No compute node record for cpu-1:a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 could not be found. [ 532.523020] env[68569]: INFO nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 [ 534.031746] env[68569]: DEBUG nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 534.032181] env[68569]: DEBUG nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 534.191680] env[68569]: INFO nova.scheduler.client.report [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] [req-24a8028d-1f16-4864-a8f5-c3a639437ea6] Created resource provider record via placement API for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
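The resource-provider record created here is what Placement uses to answer scheduling queries; for each resource class it derives schedulable capacity as (total - reserved) * allocation_ratio from the inventory the tracker reports a few entries below. The following is a minimal illustrative Python sketch (not Nova or Placement code) that simply redoes that arithmetic using the exact values logged for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6:

    # Illustrative only: recomputes the capacity Placement derives from the
    # inventory values that appear in this log; not Nova's implementation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity}")
    # Expected output: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

With the 4.0 VCPU allocation ratio, the 48 physical vCPUs reported by the hypervisor view above become 192 schedulable VCPU units, which is why the later instance claims succeed against this single node.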
[ 534.206950] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08966a9-1f86-4f04-a288-abfe3dad97a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.214930] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10165c02-18a9-4cfe-91ee-51189ac24893 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.244825] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d945fd36-332a-41f7-b336-2f0a85e04deb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.251621] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b125ceaf-db32-48da-bb29-01d2d940cd97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.264214] env[68569]: DEBUG nova.compute.provider_tree [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 534.803324] env[68569]: DEBUG nova.scheduler.client.report [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 534.803564] env[68569]: DEBUG nova.compute.provider_tree [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 0 to 1 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 534.803712] env[68569]: DEBUG nova.compute.provider_tree [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 534.853115] env[68569]: DEBUG nova.compute.provider_tree [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Updating 
resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 1 to 2 during operation: update_traits {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 535.357371] env[68569]: DEBUG nova.compute.resource_tracker [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 535.357853] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.842s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 535.357853] env[68569]: DEBUG nova.service [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Creating RPC server for service compute {{(pid=68569) start /opt/stack/nova/nova/service.py:186}} [ 535.371998] env[68569]: DEBUG nova.service [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] Join ServiceGroup membership for this service compute {{(pid=68569) start /opt/stack/nova/nova/service.py:203}} [ 535.372222] env[68569]: DEBUG nova.servicegroup.drivers.db [None req-c5b45593-d752-4704-9104-d63d4377cd8d None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68569) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 572.302235] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.302235] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.806857] env[68569]: DEBUG nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 573.362440] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.362749] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.366107] env[68569]: INFO nova.compute.claims [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.430141] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc82f75-8b68-4c34-9c4e-f045eb55688e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.439151] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151f88ad-844c-4abd-9f37-f5dae465490c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.476366] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de890e1-cde3-4340-9575-b7859510265e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.483368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013fbbd7-6dcc-40c0-b228-f4962de0550d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.498689] env[68569]: DEBUG nova.compute.provider_tree [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.003049] env[68569]: DEBUG nova.scheduler.client.report [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 575.091881] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.092168] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.281561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.281561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.518012] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 575.518326] env[68569]: DEBUG nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 575.595223] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 575.782084] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 576.027646] env[68569]: DEBUG nova.compute.utils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 576.030538] env[68569]: DEBUG nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 576.120412] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.120495] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.127099] env[68569]: INFO nova.compute.claims [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.159488] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "d6c45731-d76a-46cf-9b7d-be035a200948" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.159884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "d6c45731-d76a-46cf-9b7d-be035a200948" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.225236] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.226373] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] 
Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.310436] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.532669] env[68569]: DEBUG nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 576.594834] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.595129] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.663325] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 576.728140] env[68569]: DEBUG nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 577.100649] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 577.198766] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.268967] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.269981] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.271028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 577.356140] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e643180-b093-4e4f-85ce-757a2f00fcc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.369669] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cc091e-c21b-482f-a6e9-b9d3b247d3ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.407388] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55b708d-e012-4de1-a002-e8839f107183 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.419069] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db83f29e-3f24-48d1-b87c-f5558cc614dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.433436] env[68569]: DEBUG nova.compute.provider_tree [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.555103] env[68569]: DEBUG nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 
2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 577.630964] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.772667] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 577.938325] env[68569]: DEBUG nova.scheduler.client.report [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 577.992369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "7129a57f-e639-49ae-96a9-3c1d966034a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.992718] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.026938] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 578.026938] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.026938] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 578.027086] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.027086] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 578.027086] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 578.028066] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 578.028066] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 578.028066] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 578.028205] env[68569]: DEBUG nova.virt.hardware [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 578.028325] env[68569]: DEBUG nova.virt.hardware [None 
req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 578.029802] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f7710d-bc6e-4750-a08f-f9f3ba35bed3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.041248] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbea1e19-4958-4d54-b174-2bae396b1e8a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.055753] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473ffa1f-eb13-4990-9e79-9b4151467afc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.076555] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.086147] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.086936] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-313fcfd4-de60-44e8-9390-c6fb126fee38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.105198] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Created folder: OpenStack in parent group-v4. [ 578.105198] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Creating folder: Project (542d095a6f364e6a87be9244706552dc). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.105198] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-440a9537-5d54-4fc5-9d67-057eac9ca4cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.117492] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Created folder: Project (542d095a6f364e6a87be9244706552dc) in parent group-v633430. [ 578.119275] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Creating folder: Instances. Parent ref: group-v633431. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.119275] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ab658df-c3de-45dd-88f6-6395b26c0cdc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.131214] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Created folder: Instances in parent group-v633431. [ 578.131214] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 578.131214] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 578.131214] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c880528-c11d-4a0a-8d0d-7ad67f1526c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.164618] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.164618] env[68569]: value = "task-3166382" [ 578.164618] env[68569]: _type = "Task" [ 578.164618] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.174103] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166382, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.304966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 578.444118] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 578.445328] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 578.448756] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.139s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.450615] env[68569]: INFO nova.compute.claims [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.495527] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 578.681194] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166382, 'name': CreateVM_Task, 'duration_secs': 0.342031} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.681428] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 578.686022] env[68569]: DEBUG oslo_vmware.service [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3df679b-e211-4bf9-b891-41a37a2a9bdb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.692669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.693188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.694847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 578.695278] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df75c8b3-ca2f-43eb-b7ea-5265e52c0047 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.703288] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 578.703288] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525597c8-6d7b-8ca3-d7ab-5510a274af51" [ 578.703288] env[68569]: _type = "Task" [ 578.703288] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.716801] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525597c8-6d7b-8ca3-d7ab-5510a274af51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.963714] env[68569]: DEBUG nova.compute.utils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 578.964848] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 578.965109] env[68569]: DEBUG nova.network.neutron [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 579.040941] env[68569]: DEBUG nova.policy [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a04470ed28074c2180e39df4ff9d9b5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef27055c27a04f7e9199b9c02efa7fcf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 579.044638] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.100278] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.100524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.215036] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 579.215393] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.215674] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.215777] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 579.216225] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.218242] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6ae0c7f-ae93-461d-b417-84f429f93e2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.235150] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.235150] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 579.236024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6b1a7b-5808-450a-a342-eee8bbee8824 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.245102] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cccbabeb-b088-442f-bb0c-026e12470acf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.251393] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 579.251393] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239e9a9-1dda-a7a4-4279-47472665360b" [ 579.251393] env[68569]: _type = "Task" [ 579.251393] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.260025] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239e9a9-1dda-a7a4-4279-47472665360b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.473582] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 579.605416] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 579.691742] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61aff0f7-a35f-40f7-afcb-8356c3eb5b6e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.700221] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f127298-2242-41e3-892d-70d59d38ee2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.741792] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727c03ff-192e-4b51-b08f-b46b36e3295b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.750609] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700ead0b-f04a-4aac-b922-cac6f059f7f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.772753] env[68569]: DEBUG nova.compute.provider_tree [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.783899] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 579.784184] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Creating directory with path [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.785077] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8509c374-d455-4958-a5a7-02b1313fd9b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.806164] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Created directory with path [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.806383] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Fetch image to [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 579.806548] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Downloading image file data cfcf6154-fe87-45d3-9aaf-2d3604c95629 to [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk on the data store datastore2 {{(pid=68569) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 579.808110] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd95f16-b036-48fe-80bd-61b0a3912297 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.816076] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d87274-78ba-47e4-a413-7bb13292b2b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.826918] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ce7b00-93ff-4581-a195-edf5773f20b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.871314] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e423dc-3c8d-40be-af97-e40547903a3e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.878108] env[68569]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c864bfb6-866c-4067-9742-cf641860a40b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.971251] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Downloading image file data cfcf6154-fe87-45d3-9aaf-2d3604c95629 to the data store datastore2 {{(pid=68569) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 580.083699] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 580.188714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.278630] env[68569]: DEBUG nova.scheduler.client.report [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 580.425018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "98d5c760-6da3-49e3-af47-20a8054971f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.425018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "98d5c760-6da3-49e3-af47-20a8054971f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.488642] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 580.538463] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 580.538748] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 580.538879] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 580.539060] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 580.539247] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 580.539394] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 580.539585] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 580.539746] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 580.539922] env[68569]: DEBUG nova.virt.hardware [None 
req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 580.540101] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 580.540268] env[68569]: DEBUG nova.virt.hardware [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 580.541182] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73cd0c2-5c17-4723-9a1a-59057806d70b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.552407] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2536fdc-74ae-4ede-8e91-b27cbd0a0cb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.743471] env[68569]: DEBUG nova.network.neutron [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Successfully created port: 50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.764144] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Completed reading data from the image iterator. {{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 580.764293] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 580.785980] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 580.786599] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 580.792317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.593s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.792504] env[68569]: INFO nova.compute.claims [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 580.899336] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Downloaded image file data cfcf6154-fe87-45d3-9aaf-2d3604c95629 to vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk on the data store datastore2 {{(pid=68569) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 580.900975] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 580.901263] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Copying Virtual Disk [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk to [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 580.901539] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78159ea5-6c30-46e2-9e2d-30e466490835 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.914474] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 580.914474] env[68569]: value = "task-3166383" [ 580.914474] env[68569]: _type = "Task" [ 580.914474] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.922258] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166383, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.930773] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.293091] env[68569]: DEBUG nova.compute.utils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 581.295087] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 581.295087] env[68569]: DEBUG nova.network.neutron [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 581.427375] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166383, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.451304] env[68569]: DEBUG nova.policy [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57ef72430e2043b6bf41727a18993460', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26331f741df845d5b657d7268c1c9131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 581.464727] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.799533] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 581.930085] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166383, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685724} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.930417] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Copied Virtual Disk [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk to [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 581.930601] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleting the datastore file [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 581.930891] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc280b93-eee7-4ed2-980b-88b4c7b0514e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.939195] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 581.939195] env[68569]: value = "task-3166384" [ 581.939195] env[68569]: _type = "Task" [ 581.939195] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.949981] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166384, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.062153] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d301bea-3c4f-4469-a103-87cba618d804 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.071106] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20b7907-a62a-4c50-afeb-874ae919e6f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.120986] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ba8cb1-e84a-45d1-8b87-a80217e1bb48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.134379] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedf37c2-3c18-4f14-872b-3c70b8cff780 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.153848] env[68569]: DEBUG nova.compute.provider_tree [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.451479] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024572} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.451818] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 582.451925] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Moving file from [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2/cfcf6154-fe87-45d3-9aaf-2d3604c95629 to [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629. {{(pid=68569) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 582.452189] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-9bf5325d-5b7d-4d1c-add3-9066df567b6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.459992] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 582.459992] env[68569]: value = "task-3166385" [ 582.459992] env[68569]: _type = "Task" [ 582.459992] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.476342] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166385, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.477167] env[68569]: DEBUG nova.network.neutron [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Successfully created port: 8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.635044] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "1c8dfb47-df19-4101-8d4e-30889d71d7da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.635044] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.657252] env[68569]: DEBUG nova.scheduler.client.report [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 582.816085] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 582.863138] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 582.863399] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.863553] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 582.863814] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.863959] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 582.864113] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 582.864311] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 582.864461] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 582.864626] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 582.864787] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 582.864952] env[68569]: DEBUG nova.virt.hardware [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 582.866332] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9548c3d5-4149-40b9-9021-634134af40ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.877337] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ec51da-1cae-4961-b5a3-5590b6fb859f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.971279] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166385, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024504} completed successfully. 
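The nova.virt.hardware lines above show the topology calculation for the 1-vCPU m1.nano flavor: with no flavor or image limits, the only way to factor one vCPU into sockets x cores x threads is 1:1:1, hence exactly one possible and one desired topology. A toy enumeration of the same idea (not Nova's actual implementation, which also applies the 65536 socket/core/thread ceilings shown above):

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) splits whose product equals vcpus."""
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            yield sockets, cores, threads

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the single topology in the log
```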
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.971279] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] File moved {{(pid=68569) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 582.972196] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Cleaning up location [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 582.972919] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleting the datastore file [datastore2] vmware_temp/33bc855e-c843-4030-bfe8-f7a7c33a1ca2 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 582.973119] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd825d59-a357-4b4f-adca-f1b920590310 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.984145] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 582.984145] env[68569]: value = "task-3166386" [ 582.984145] env[68569]: _type = "Task" [ 582.984145] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.993166] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.162267] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 583.163199] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 583.165841] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.899s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.167312] env[68569]: INFO nova.compute.claims [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.412824] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.413057] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.420763] env[68569]: DEBUG nova.network.neutron [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Successfully updated port: 50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 583.497572] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025625} completed successfully. 
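The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets, such as the compute_resources lock around ResourceTracker.instance_claim above, are emitted at DEBUG level by oslo.concurrency's lockutils wrapper. A minimal standalone use of the same primitive (the lock name and function here are illustrative, not Nova's code):

```python
import time

from oslo_concurrency import lockutils

# With oslo logging at DEBUG, entering and leaving the decorated function
# produces "Acquiring lock ...", "Lock ... acquired ... waited Xs" and
# "Lock ... released ... held Ys" messages like those in this log.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    time.sleep(0.1)  # stand-in for the work done while the lock is held
    return instance_uuid

claim_resources('00000000-0000-0000-0000-000000000000')
```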
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.497833] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 583.498606] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ac82906-1cb5-4891-9ce0-7cfd0a0846df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.508741] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 583.508741] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bdf6d4-193a-4e5f-13c7-32d106ffb938" [ 583.508741] env[68569]: _type = "Task" [ 583.508741] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.516887] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bdf6d4-193a-4e5f-13c7-32d106ffb938, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.677031] env[68569]: DEBUG nova.compute.utils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 583.686963] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 583.686963] env[68569]: DEBUG nova.network.neutron [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 583.812875] env[68569]: DEBUG nova.policy [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05b597e6bdda4972bd8bf5f19a95f60d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b67f0839bcd4dcbb1871cfe63cdcaac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 583.921558] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.921815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.921892] env[68569]: DEBUG nova.network.neutron [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 584.020391] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bdf6d4-193a-4e5f-13c7-32d106ffb938, 'name': SearchDatastore_Task, 'duration_secs': 0.009708} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.020716] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.021014] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 2c47ef2c-873a-4cb3-9a36-aa2155911b6e/2c47ef2c-873a-4cb3-9a36-aa2155911b6e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 584.021326] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-204ff0da-54fb-464c-9807-476a05ad8cd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.028521] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 584.028521] env[68569]: value = "task-3166387" [ 584.028521] env[68569]: _type = "Task" [ 584.028521] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.041781] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166387, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.185774] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 584.457671] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91e88ed-383c-499d-9b8d-d0c65be7ddaa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.472140] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94f6cc5-09eb-41b3-ad17-82b3c6808b14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.511859] env[68569]: DEBUG nova.network.neutron [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.514909] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f55d388-0022-41a5-a29a-ec12b1797da0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.524093] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684c0c99-664f-46a4-ac73-48c9d95d0e8f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.542537] env[68569]: DEBUG nova.compute.provider_tree [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.549029] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166387, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496091} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.549284] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 2c47ef2c-873a-4cb3-9a36-aa2155911b6e/2c47ef2c-873a-4cb3-9a36-aa2155911b6e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 584.549494] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 584.549730] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-103cf997-6876-47da-a4ff-a30db31d2e26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.555520] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 584.555520] env[68569]: value = "task-3166388" [ 584.555520] env[68569]: _type = "Task" [ 584.555520] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.565475] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166388, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.891193] env[68569]: DEBUG nova.network.neutron [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Successfully created port: f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 585.047595] env[68569]: DEBUG nova.scheduler.client.report [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.068074] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166388, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066316} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.072035] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 585.074159] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262680f6-5239-44f6-afe6-aab51edc1529 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.114658] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 2c47ef2c-873a-4cb3-9a36-aa2155911b6e/2c47ef2c-873a-4cb3-9a36-aa2155911b6e.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 585.114658] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1235a36a-5dd0-48ef-9f42-599b3a826f00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.134196] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 585.134196] env[68569]: value = "task-3166389" [ 585.134196] env[68569]: _type = "Task" [ 585.134196] env[68569]: } to complete. 
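The inventory payload reported above for this node's resource provider is what Placement uses for scheduling; the usable capacity of each resource class is (total - reserved) * allocation_ratio. A quick check of the numbers in the log (plain arithmetic, not Nova code):

```python
# Values copied from the inventory data logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192 allocatable, MEMORY_MB: 196078 allocatable, DISK_GB: 400 allocatable
```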
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.145968] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166389, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.167779] env[68569]: DEBUG nova.compute.manager [req-b36e7cd3-c4fe-489f-abf1-3ffd7c678a73 req-533000e8-b55f-4864-852a-7c63acdc6540 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Received event network-vif-plugged-50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 585.168577] env[68569]: DEBUG oslo_concurrency.lockutils [req-b36e7cd3-c4fe-489f-abf1-3ffd7c678a73 req-533000e8-b55f-4864-852a-7c63acdc6540 service nova] Acquiring lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.171239] env[68569]: DEBUG oslo_concurrency.lockutils [req-b36e7cd3-c4fe-489f-abf1-3ffd7c678a73 req-533000e8-b55f-4864-852a-7c63acdc6540 service nova] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.171239] env[68569]: DEBUG oslo_concurrency.lockutils [req-b36e7cd3-c4fe-489f-abf1-3ffd7c678a73 req-533000e8-b55f-4864-852a-7c63acdc6540 service nova] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.171239] env[68569]: DEBUG nova.compute.manager [req-b36e7cd3-c4fe-489f-abf1-3ffd7c678a73 req-533000e8-b55f-4864-852a-7c63acdc6540 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] No waiting events found dispatching network-vif-plugged-50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 585.171239] env[68569]: WARNING nova.compute.manager [req-b36e7cd3-c4fe-489f-abf1-3ffd7c678a73 req-533000e8-b55f-4864-852a-7c63acdc6540 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Received unexpected event network-vif-plugged-50f8883e-b914-4589-ac89-c1ccfb5a91de for instance with vm_state building and task_state spawning. [ 585.200787] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 585.244839] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 585.245095] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.245244] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 585.245417] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.245554] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 585.245690] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 585.246065] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 585.249785] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 585.249785] env[68569]: DEBUG nova.virt.hardware [None 
req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 585.251476] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 585.251715] env[68569]: DEBUG nova.virt.hardware [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 585.252645] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b53f24-863e-42d6-a307-ca19e75e8d3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.262948] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3af925-b662-47f6-ab8f-1158c36d5ba5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.284113] env[68569]: DEBUG nova.network.neutron [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updating instance_info_cache with network_info: [{"id": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "address": "fa:16:3e:b3:b1:43", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f8883e-b9", "ovs_interfaceid": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.443964] env[68569]: DEBUG nova.network.neutron [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Successfully updated port: 8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 585.555479] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.556051] env[68569]: DEBUG nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 585.567332] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.931s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.567332] env[68569]: INFO nova.compute.claims [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.645893] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166389, 'name': ReconfigVM_Task, 'duration_secs': 0.289989} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.645977] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 2c47ef2c-873a-4cb3-9a36-aa2155911b6e/2c47ef2c-873a-4cb3-9a36-aa2155911b6e.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 585.646997] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f29e48c9-76e3-4cf0-b255-d65fb8f7868d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.655586] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 585.655586] env[68569]: value = "task-3166390" [ 585.655586] env[68569]: _type = "Task" [ 585.655586] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.666252] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166390, 'name': Rename_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.787270] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 585.787591] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Instance network_info: |[{"id": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "address": "fa:16:3e:b3:b1:43", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f8883e-b9", "ovs_interfaceid": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 585.788083] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:b1:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4c5eb94-841c-4713-985a-8fc4117fbaf1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50f8883e-b914-4589-ac89-c1ccfb5a91de', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.801090] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Creating folder: Project (ef27055c27a04f7e9199b9c02efa7fcf). Parent ref: group-v633430. 
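The "Instance VIF info" entry above is the vmwareapi driver's translation of the Neutron port shown in the same record: the port id and MAC carry over directly, and because the binding details include an NSX logical-switch id the network_ref is an OpaqueNetwork rather than a standard port group, with vif_model vmxnet3 coming from the image. A rough sketch of that mapping, with field names taken from the log rather than from the driver's internal helpers:

```python
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    """Build a VIF-info dict like the one logged above from one network_info entry.

    Illustrative only: assumes an NSX-backed 'ovs' port whose binding details
    carry an 'nsx-logical-switch-id', as in this log.
    """
    return {
        'network_name': vif['network']['bridge'],              # e.g. 'br-int'
        'mac_address': vif['address'],                         # e.g. 'fa:16:3e:b3:b1:43'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }

# Calling this with the network_info entry logged above would reproduce the
# dict Nova printed as "Instance VIF info".
```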
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.802631] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-feb8d831-038a-4eb3-bb7c-8fd2ba9a0308 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.816042] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Created folder: Project (ef27055c27a04f7e9199b9c02efa7fcf) in parent group-v633430. [ 585.816542] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Creating folder: Instances. Parent ref: group-v633434. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.816542] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29af77ca-975c-431c-b790-190aba9f35d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.827451] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Created folder: Instances in parent group-v633434. [ 585.827726] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 585.827934] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 585.828197] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a81291a-054a-4048-baf5-1f3cbb6c4b22 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.854452] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.854452] env[68569]: value = "task-3166393" [ 585.854452] env[68569]: _type = "Task" [ 585.854452] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.866160] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166393, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.947113] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "refresh_cache-3ee3365b-0799-414b-b2a1-1d219bd9db96" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.947314] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired lock "refresh_cache-3ee3365b-0799-414b-b2a1-1d219bd9db96" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.947425] env[68569]: DEBUG nova.network.neutron [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 586.068639] env[68569]: DEBUG nova.compute.utils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 586.075032] env[68569]: DEBUG nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 586.169217] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166390, 'name': Rename_Task, 'duration_secs': 0.144264} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.169553] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 586.169736] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f844fb7-e62e-4095-9b27-9888e645e85c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.183832] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 586.183832] env[68569]: value = "task-3166394" [ 586.183832] env[68569]: _type = "Task" [ 586.183832] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.194643] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166394, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.370870] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166393, 'name': CreateVM_Task, 'duration_secs': 0.366869} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.371068] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 586.383904] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.384108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.384770] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 586.385089] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196e3d6c-8693-4466-ac3f-6cc10a7a5bb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.394869] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 586.394869] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52306b6b-5d03-8fa3-bb31-61c07b2f679f" [ 586.394869] env[68569]: _type = "Task" [ 586.394869] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.403670] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52306b6b-5d03-8fa3-bb31-61c07b2f679f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.575934] env[68569]: DEBUG nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 586.609029] env[68569]: DEBUG nova.network.neutron [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.696834] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166394, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.864374] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6827580c-647f-4028-a740-4f4cfda2ddfb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.873369] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15468d82-f33d-4a55-9c3a-6fb82bd88b47 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.909409] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e324bca-3932-42b8-8cfa-1432d04ba391 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.922209] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52306b6b-5d03-8fa3-bb31-61c07b2f679f, 'name': SearchDatastore_Task, 'duration_secs': 0.013718} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.923185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 586.923185] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.923185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.923185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.923455] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.924733] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54987c96-5ab9-443b-857a-0d6ac7e778f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.931029] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1219523b-448b-4e32-9de7-717348e5f3df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.947023] env[68569]: DEBUG nova.compute.provider_tree [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.947662] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.947862] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 
tempest-SecurityGroupsTestJSON-456876085-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 586.948850] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3b3877e-a312-4c58-906f-5bb0c748cee5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.954609] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 586.954609] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526b9fc1-30b9-4f4a-cd95-70c0369c3c91" [ 586.954609] env[68569]: _type = "Task" [ 586.954609] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.967540] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526b9fc1-30b9-4f4a-cd95-70c0369c3c91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.060038] env[68569]: DEBUG nova.network.neutron [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Updating instance_info_cache with network_info: [{"id": "8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23", "address": "fa:16:3e:5e:d2:47", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8713f6a0-21", "ovs_interfaceid": "8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.201154] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166394, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.373280] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.373594] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.373799] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.373975] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.374161] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.374349] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.374568] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.423794] env[68569]: DEBUG nova.network.neutron [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Successfully updated port: f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.449815] env[68569]: DEBUG nova.scheduler.client.report [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 587.469255] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': 
session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526b9fc1-30b9-4f4a-cd95-70c0369c3c91, 'name': SearchDatastore_Task, 'duration_secs': 0.010904} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.470102] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05dbfc4d-4fb7-4fc0-a523-0a561d5434d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.476157] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 587.476157] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d7c241-8d1d-64c0-6d28-211ec3f53af7" [ 587.476157] env[68569]: _type = "Task" [ 587.476157] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.484935] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d7c241-8d1d-64c0-6d28-211ec3f53af7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.562529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Releasing lock "refresh_cache-3ee3365b-0799-414b-b2a1-1d219bd9db96" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.562866] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Instance network_info: |[{"id": "8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23", "address": "fa:16:3e:5e:d2:47", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8713f6a0-21", "ovs_interfaceid": "8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 587.563854] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 
tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:d2:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.574025] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Creating folder: Project (26331f741df845d5b657d7268c1c9131). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.574025] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67810af1-d875-48ad-8362-771fa9e251f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.587018] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Created folder: Project (26331f741df845d5b657d7268c1c9131) in parent group-v633430. [ 587.587018] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Creating folder: Instances. Parent ref: group-v633437. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.587018] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb3891f9-7dc2-4c95-9e2c-6d2091e13a8a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.595853] env[68569]: DEBUG nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 587.598484] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Created folder: Instances in parent group-v633437. [ 587.599188] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 587.599340] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 587.599563] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59cbb2b8-8d45-4c7a-928b-9061e205662a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.620945] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.620945] env[68569]: value = "task-3166397" [ 587.620945] env[68569]: _type = "Task" [ 587.620945] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.630817] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166397, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.637049] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 587.637346] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.637521] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 587.637865] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.638359] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 587.644390] env[68569]: DEBUG nova.virt.hardware [None 
req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 587.644390] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 587.644390] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 587.644390] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 587.644390] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 587.644809] env[68569]: DEBUG nova.virt.hardware [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 587.644809] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e0912f-2dbd-46d8-8632-2e6b67638084 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.652873] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d223c45-a261-480c-8352-948a35d71d57 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.670629] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.676489] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Creating folder: Project (8c2f344527c040c58f857513e49e198c). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.677236] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18e5d877-f9a0-468f-9742-b80744bd8e8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.685420] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Created folder: Project (8c2f344527c040c58f857513e49e198c) in parent group-v633430. [ 587.685602] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Creating folder: Instances. Parent ref: group-v633440. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.685836] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da49a89a-3385-44d3-8119-fb97162775b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.696360] env[68569]: DEBUG oslo_vmware.api [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166394, 'name': PowerOnVM_Task, 'duration_secs': 1.048284} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.699025] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 587.699025] env[68569]: INFO nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Took 10.14 seconds to spawn the instance on the hypervisor. [ 587.699163] env[68569]: DEBUG nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.699848] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Created folder: Instances in parent group-v633440. [ 587.700578] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 587.701287] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e0e74b-2ccb-4528-bec4-9d6fe4aeca2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.704842] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 587.705223] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e37715fb-af6d-4912-9375-1820a9beef60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.729258] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.729258] env[68569]: value = "task-3166400" [ 587.729258] env[68569]: _type = "Task" [ 587.729258] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.739850] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166400, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.884062] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Getting list of instances from cluster (obj){ [ 587.884062] env[68569]: value = "domain-c8" [ 587.884062] env[68569]: _type = "ClusterComputeResource" [ 587.884062] env[68569]: } {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 587.886506] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9192c487-3ecf-40b4-a418-fcb4c58e8ec7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.900366] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Got total of 2 instances {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 587.900366] env[68569]: WARNING nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] While synchronizing instance power states, found 6 instances in the database and 2 instances on the hypervisor. 
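The entries above repeat one pattern many times: the driver invokes a vCenter task (CreateVM_Task, PowerOnVM_Task, SearchDatastore_Task), logs "Waiting for the task", and _poll_task then reports progress until the task "completed successfully". The sketch below is a minimal, hypothetical illustration of that poll-until-done loop, not the oslo.vmware implementation; fetch_task_info() and the shape of the dict it returns are invented for the example.

import time

def wait_for_vcenter_task(fetch_task_info, poll_interval=0.5):
    # Poll a vCenter-style task until it reaches a terminal state.
    # fetch_task_info is assumed to return a dict such as
    # {'state': 'running', 'progress': 33} or {'state': 'success', 'result': ...}.
    while True:
        info = fetch_task_info()
        state = info.get('state')
        if state == 'success':
            return info.get('result')          # "completed successfully"
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print("progress is %s%%" % info.get('progress', 0))  # the "progress is N%" DEBUG lines
        time.sleep(poll_interval)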
[ 587.900366] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 2c47ef2c-873a-4cb3-9a36-aa2155911b6e {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 587.900366] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 587.900366] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 3ee3365b-0799-414b-b2a1-1d219bd9db96 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 587.900819] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid d6c45731-d76a-46cf-9b7d-be035a200948 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 587.901044] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 7693ef68-d7e5-4899-9615-9f2a1dd0bce8 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 587.901330] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 587.902255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.902988] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.902988] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.903583] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "d6c45731-d76a-46cf-9b7d-be035a200948" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.903795] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.904063] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock 
"87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.904603] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.904954] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 587.912562] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.929031] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "refresh_cache-d6c45731-d76a-46cf-9b7d-be035a200948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.929150] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquired lock "refresh_cache-d6c45731-d76a-46cf-9b7d-be035a200948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 587.929789] env[68569]: DEBUG nova.network.neutron [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.955583] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.956829] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 587.960470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.656s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.965531] env[68569]: INFO nova.compute.claims [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.974290] env[68569]: DEBUG nova.compute.manager [req-18c58862-2a39-4a55-92ab-1d2e486c5819 req-e772fd88-d000-4d3a-b487-fc2e4bea4751 service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Received event network-vif-plugged-8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 587.974548] env[68569]: DEBUG oslo_concurrency.lockutils [req-18c58862-2a39-4a55-92ab-1d2e486c5819 req-e772fd88-d000-4d3a-b487-fc2e4bea4751 service nova] Acquiring lock "3ee3365b-0799-414b-b2a1-1d219bd9db96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.974901] env[68569]: DEBUG oslo_concurrency.lockutils [req-18c58862-2a39-4a55-92ab-1d2e486c5819 req-e772fd88-d000-4d3a-b487-fc2e4bea4751 service nova] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.975420] env[68569]: DEBUG oslo_concurrency.lockutils [req-18c58862-2a39-4a55-92ab-1d2e486c5819 req-e772fd88-d000-4d3a-b487-fc2e4bea4751 service nova] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.975683] env[68569]: DEBUG nova.compute.manager [req-18c58862-2a39-4a55-92ab-1d2e486c5819 req-e772fd88-d000-4d3a-b487-fc2e4bea4751 service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] No waiting events found dispatching network-vif-plugged-8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 587.975897] env[68569]: WARNING nova.compute.manager [req-18c58862-2a39-4a55-92ab-1d2e486c5819 req-e772fd88-d000-4d3a-b487-fc2e4bea4751 service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Received unexpected event network-vif-plugged-8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 for instance with vm_state building and task_state spawning. [ 587.991478] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d7c241-8d1d-64c0-6d28-211ec3f53af7, 'name': SearchDatastore_Task, 'duration_secs': 0.008729} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.991887] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.992205] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155/5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.993178] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff54c283-fe92-4f11-bc95-5758bc9935f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.001745] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 588.001745] env[68569]: value = "task-3166401" [ 588.001745] env[68569]: _type = "Task" [ 588.001745] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.012172] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.135824] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166397, 'name': CreateVM_Task, 'duration_secs': 0.353175} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.135824] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 588.138337] env[68569]: DEBUG oslo_vmware.service [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa80ccb-6586-40e3-91cf-cf2d2cc7ebb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.144350] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.144573] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.145054] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 588.145851] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2cb1a63-2c47-4a1e-a5b8-0c30a3bfcd0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.153730] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 588.153730] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ec1a74-8a21-bfd3-a7a8-8b81e5545a75" [ 588.153730] env[68569]: _type = "Task" [ 588.153730] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.161059] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ec1a74-8a21-bfd3-a7a8-8b81e5545a75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.249310] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166400, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.249310] env[68569]: INFO nova.compute.manager [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Took 14.93 seconds to build instance. [ 588.419038] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.475942] env[68569]: DEBUG nova.compute.utils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 588.482996] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 588.483207] env[68569]: DEBUG nova.network.neutron [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 588.503676] env[68569]: DEBUG nova.network.neutron [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.520472] env[68569]: DEBUG nova.compute.manager [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Received event network-changed-50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 588.520472] env[68569]: DEBUG nova.compute.manager [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Refreshing instance network info cache due to event network-changed-50f8883e-b914-4589-ac89-c1ccfb5a91de. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 588.520472] env[68569]: DEBUG oslo_concurrency.lockutils [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] Acquiring lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.520472] env[68569]: DEBUG oslo_concurrency.lockutils [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] Acquired lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.520472] env[68569]: DEBUG nova.network.neutron [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Refreshing network info cache for port 50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 588.530677] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497214} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.530677] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155/5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 588.530677] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 588.530677] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87514c24-225b-43c4-9888-f5746c85310a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.541713] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 588.541713] env[68569]: value = "task-3166402" [ 588.541713] env[68569]: _type = "Task" [ 588.541713] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.553201] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166402, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.632951] env[68569]: DEBUG nova.policy [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2762206641740db911bb37f706e754a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94334816cb6c442c8a06f3bd8917655b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 588.669203] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 588.669485] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 588.669730] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.669876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.670068] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 588.670328] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f107fe4d-0b11-4405-889b-a3ecf8731adb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.681146] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} 
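The Acquiring/Acquired/Releasing lines around "[datastoreN] devstack-image-cache_base/<image-id>" show a lock-guarded image-cache check: take a lock named after the cache path, run SearchDatastore_Task, create the directory and copy the image only if it is missing, then release the lock. A hedged stand-in is sketched below; the real service uses oslo_concurrency.lockutils, while here a per-name threading.Lock and the search_datastore()/copy_image() callables are placeholders invented for illustration.

import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)

def fetch_image_if_missing(datastore, image_id, search_datastore, copy_image):
    # Lock name mirrors the log: "[datastore2] devstack-image-cache_base/<image-id>"
    cache_path = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with _cache_locks[cache_path]:              # "Acquired lock ..."
        if not search_datastore(cache_path):    # SearchDatastore_Task
            copy_image(image_id, cache_path)    # only the first worker copies
    # leaving the with-block corresponds to the "Releasing lock ..." line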
[ 588.681146] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 588.681146] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf7e52e-eaaf-4a32-8fa3-c41fc7afe6ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.689160] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7772073b-9e6c-4148-a4a7-c8cffdeb4bb0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.693988] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 588.693988] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521353fc-b96e-4567-1309-5378ead01c15" [ 588.693988] env[68569]: _type = "Task" [ 588.693988] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.702978] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521353fc-b96e-4567-1309-5378ead01c15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.748504] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166400, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.752689] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e6b0a697-1bbe-4625-8e20-9e5183c9f2c9 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.450s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.756034] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.853s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.756034] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] During sync_power_state the instance has a pending task (spawning). Skip. 
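_sync_power_states above found 6 instances in the database against 2 on the hypervisor, triggered a per-uuid sync under a lock, and then skipped 2c47ef2c-873a-4cb3-9a36-aa2155911b6e because it still had a pending task (spawning). The fragment below is a simplified, hypothetical rendering of that skip rule; the Instance dataclass and the power-state map are stand-ins, not Nova's objects.

from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    task_state: str | None   # e.g. 'spawning' while the VM is still being built
    power_state: str

def sync_power_states(db_instances, hypervisor_power_states):
    for inst in db_instances:
        if inst.task_state is not None:
            continue  # "During sync_power_state the instance has a pending task ... Skip."
        actual = hypervisor_power_states.get(inst.uuid)
        if actual is not None and actual != inst.power_state:
            inst.power_state = actual  # reconcile the DB record with the hypervisor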
[ 588.756034] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.965609] env[68569]: DEBUG nova.network.neutron [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Updating instance_info_cache with network_info: [{"id": "f82cdc68-b219-4ba1-8786-d0f6ca06da82", "address": "fa:16:3e:28:9d:36", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf82cdc68-b2", "ovs_interfaceid": "f82cdc68-b219-4ba1-8786-d0f6ca06da82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.986028] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 589.055440] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166402, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065748} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.059045] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 589.060728] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99990715-e704-4740-ab39-2ca87bdaec49 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.092653] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155/5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 589.093428] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b5460e6-2f13-4154-9314-619edda0c79b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.112615] env[68569]: DEBUG nova.network.neutron [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Successfully created port: ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.123674] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 589.123674] env[68569]: value = "task-3166403" [ 589.123674] env[68569]: _type = "Task" [ 589.123674] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.131645] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166403, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.207263] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 589.207476] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Creating directory with path [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.210450] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ac3686c-efe3-4bd5-9502-f7bcf18de7c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.227877] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Created directory with path [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.227995] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Fetch image to [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 589.228190] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Downloading image file data cfcf6154-fe87-45d3-9aaf-2d3604c95629 to [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk on the data store datastore1 {{(pid=68569) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 589.229315] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b281a2a-9cd2-4cf4-9c8d-1b59d5bf888e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.237417] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c14a0c-7fd6-455b-8cf7-8a9f9da1cdf1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.258704] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0238343a-c8c6-4229-9388-404246f19afb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.269837] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 
tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 589.274504] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166400, 'name': CreateVM_Task, 'duration_secs': 1.416963} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.274504] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 589.274504] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.274504] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.274726] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 589.274927] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9fa6977-ec9e-4692-b161-8b7e1d8a74bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.312793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4630167c-33db-4141-85de-67b76801ceeb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.319843] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 589.319843] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520c84b1-b237-cc8d-aa55-676b4635a19c" [ 589.319843] env[68569]: _type = "Task" [ 589.319843] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.326376] env[68569]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-470ab855-c683-4282-a4bb-16c4b5c98123 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.332950] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.333098] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.333655] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.384222] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d72c16a-23a6-47e0-b8fa-5af65f6f593f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.392529] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859b185d-c37b-4352-898c-bd2469d100b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.426607] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f459f874-853c-4c19-98dd-6403a4aaa22a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.429525] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Downloading image file data cfcf6154-fe87-45d3-9aaf-2d3604c95629 to the data store datastore1 {{(pid=68569) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 589.437161] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a81caba-d7cd-49a1-946f-9ca95adf666c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.449888] env[68569]: DEBUG nova.compute.provider_tree [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
589.470611] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Releasing lock "refresh_cache-d6c45731-d76a-46cf-9b7d-be035a200948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.470611] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Instance network_info: |[{"id": "f82cdc68-b219-4ba1-8786-d0f6ca06da82", "address": "fa:16:3e:28:9d:36", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf82cdc68-b2", "ovs_interfaceid": "f82cdc68-b219-4ba1-8786-d0f6ca06da82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 589.470939] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:9d:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f82cdc68-b219-4ba1-8786-d0f6ca06da82', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.478770] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Creating folder: Project (3b67f0839bcd4dcbb1871cfe63cdcaac). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.482163] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c042cfe5-2519-458b-a715-dfcc5ae9a229 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.490692] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Created folder: Project (3b67f0839bcd4dcbb1871cfe63cdcaac) in parent group-v633430. 
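The network_info blobs nova writes when it refreshes an instance's network info cache (as in the entries above) are plain JSON, so they can be post-processed directly. A small self-contained example that extracts the fields most often needed when reading this log (port id, MAC address, fixed IPs); the payload below is a trimmed copy of the one logged above:

    import json

    network_info_json = """
    [{"id": "f82cdc68-b219-4ba1-8786-d0f6ca06da82",
      "address": "fa:16:3e:28:9d:36",
      "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f",
                  "bridge": "br-int",
                  "label": "shared",
                  "subnets": [{"cidr": "192.168.233.0/24",
                               "ips": [{"address": "192.168.233.110",
                                        "type": "fixed",
                                        "version": 4}]}]},
      "type": "ovs",
      "devname": "tapf82cdc68-b2",
      "active": true}]
    """

    for vif in json.loads(network_info_json):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(f'port {vif["id"]} mac {vif["address"]} ips {ips}')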
[ 589.490905] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Creating folder: Instances. Parent ref: group-v633443. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.491122] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70d855e6-8460-4240-b463-f26ff5322002 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.498821] env[68569]: DEBUG oslo_vmware.rw_handles [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 589.501575] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Created folder: Instances in parent group-v633443. [ 589.501797] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 589.502898] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 589.555497] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25a5e336-18dd-4c11-8624-04f7de6e209e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.572709] env[68569]: DEBUG nova.network.neutron [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updated VIF entry in instance network info cache for port 50f8883e-b914-4589-ac89-c1ccfb5a91de. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 589.573500] env[68569]: DEBUG nova.network.neutron [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updating instance_info_cache with network_info: [{"id": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "address": "fa:16:3e:b3:b1:43", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f8883e-b9", "ovs_interfaceid": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.582696] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.582696] env[68569]: value = "task-3166406" [ 589.582696] env[68569]: _type = "Task" [ 589.582696] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.594015] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166406, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.634915] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166403, 'name': ReconfigVM_Task, 'duration_secs': 0.308127} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.638394] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155/5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 589.639245] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-827597ef-5c6a-4af9-8df7-8b3a307d74f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.645533] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 589.645533] env[68569]: value = "task-3166407" [ 589.645533] env[68569]: _type = "Task" [ 589.645533] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.659012] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166407, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.793219] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.954488] env[68569]: DEBUG nova.scheduler.client.report [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 590.002207] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 590.043071] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 590.043344] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.043495] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 590.043669] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.043811] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 590.043974] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 590.044214] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 590.044368] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
590.044531] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 590.044725] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 590.044928] env[68569]: DEBUG nova.virt.hardware [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 590.046203] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5170ac25-6e2d-4d7e-84e1-34e87cd43a69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.060727] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136b24f2-8669-4b8d-9f24-049320f6cd7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.092765] env[68569]: DEBUG oslo_concurrency.lockutils [req-a716b751-003f-4b88-8f29-cf6e739c7ee8 req-9da5399e-564a-43df-8ce0-29348a7e74a7 service nova] Releasing lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.110174] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166406, 'name': CreateVM_Task, 'duration_secs': 0.454809} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.111481] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 590.112298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.112498] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 590.112878] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 590.113255] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67e7e674-c4e5-4f2f-8a8c-19dc55e4c0b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.118427] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 590.118427] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c77032-b265-6ecd-d93c-9512f094a28b" [ 590.118427] env[68569]: _type = "Task" [ 590.118427] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.129059] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c77032-b265-6ecd-d93c-9512f094a28b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.160881] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166407, 'name': Rename_Task, 'duration_secs': 0.159073} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.163479] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 590.163889] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ca673ea-3227-4c31-b41e-340eca788295 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.172094] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 590.172094] env[68569]: value = "task-3166408" [ 590.172094] env[68569]: _type = "Task" [ 590.172094] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.181472] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.194414] env[68569]: DEBUG oslo_vmware.rw_handles [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Completed reading data from the image iterator. {{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 590.194414] env[68569]: DEBUG oslo_vmware.rw_handles [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 590.289653] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.291851] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.345240] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Downloaded image file data cfcf6154-fe87-45d3-9aaf-2d3604c95629 to vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk on the data store datastore1 {{(pid=68569) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 590.347650] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 590.348010] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Copying Virtual Disk [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk to [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 590.348329] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b35aae2e-22b8-4676-bbfd-98dab903aae5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.356298] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 590.356298] env[68569]: value = "task-3166409" [ 590.356298] env[68569]: _type = "Task" [ 590.356298] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.366960] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166409, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.459884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.460342] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 590.463414] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.420s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.465684] env[68569]: INFO nova.compute.claims [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.634658] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.634925] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.635099] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.686693] env[68569]: DEBUG oslo_vmware.api [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166408, 'name': 
PowerOnVM_Task, 'duration_secs': 0.485004} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.687109] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 590.687379] env[68569]: INFO nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Took 10.20 seconds to spawn the instance on the hypervisor. [ 590.687630] env[68569]: DEBUG nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 590.688618] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31647843-64f3-4088-8d72-ac7b271a3f71 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.868629] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166409, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.973199] env[68569]: DEBUG nova.compute.utils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 590.978774] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 590.978983] env[68569]: DEBUG nova.network.neutron [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 591.041815] env[68569]: DEBUG nova.policy [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48f0153c75da4790905b1f734eb447e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb7d044e2a2e4568b5c8c922b17a81ce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 591.179750] env[68569]: DEBUG nova.network.neutron [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Successfully updated port: ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 591.211619] env[68569]: INFO nova.compute.manager [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Took 15.11 seconds to build instance. [ 591.295840] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "e77cc179-1f3d-4095-a491-48df7f79bdb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.296161] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.370268] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166409, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723428} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.370786] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Copied Virtual Disk [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk to [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 591.371114] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleting the datastore file [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629/tmp-sparse.vmdk {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 591.371643] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16b1b29d-7e68-4380-80e8-5229b46c0643 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.380250] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 591.380250] env[68569]: value = "task-3166410" [ 591.380250] env[68569]: _type = "Task" [ 591.380250] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.393347] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.442586] env[68569]: DEBUG nova.network.neutron [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Successfully created port: a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.481941] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 591.680452] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "refresh_cache-87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.680593] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired lock "refresh_cache-87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.680737] env[68569]: DEBUG nova.network.neutron [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 591.706856] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a129e99-6b9a-4e5d-b9e4-29f40a1be058 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.715473] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b2660e-a300-40d0-9a73-aa5c747fedc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.719105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dda05917-77d2-4d1c-bedc-7404d3537e69 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.627s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 591.721379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.818s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.721599] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] During sync_power_state the instance has a pending task (spawning). Skip. 
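The sequence traced across this stretch of the log (HTTP download of the image to vmware_temp/.../tmp-sparse.vmdk, CopyVirtualDisk_Task into an image-named VMDK, DeleteDatastoreFile_Task on the sparse temp file, then MoveDatastoreFile_Task into devstack-image-cache_base) is the driver populating its per-datastore image cache. The outline below only restates that order; the helper functions are hypothetical stand-ins for the corresponding nova/oslo.vmware calls, not real APIs:

    # Hypothetical stand-ins for the calls that produce the tasks in the log;
    # they only print, so the outline is runnable on its own.
    def download_from_glance(image_id, dest):
        print(f"download image {image_id} -> {dest}")   # rw_handles write

    def copy_virtual_disk(src, dest):
        print(f"CopyVirtualDisk_Task {src} -> {dest}")

    def delete_datastore_file(path):
        print(f"DeleteDatastoreFile_Task {path}")

    def move_datastore_file(src, dest):
        print(f"MoveDatastoreFile_Task {src} -> {dest}")

    def populate_image_cache(image_id, datastore="datastore1",
                             tmp_root="vmware_temp/example-uuid"):
        tmp_dir = f"[{datastore}] {tmp_root}/{image_id}"
        tmp_sparse = f"{tmp_dir}/tmp-sparse.vmdk"
        download_from_glance(image_id, tmp_sparse)
        copy_virtual_disk(tmp_sparse, f"{tmp_dir}/{image_id}.vmdk")
        delete_datastore_file(tmp_sparse)
        move_datastore_file(tmp_dir,
                            f"[{datastore}] devstack-image-cache_base/{image_id}")

    populate_image_cache("cfcf6154-fe87-45d3-9aaf-2d3604c95629")

Concurrent builds that need the same image serialize on the named image-cache lock shown earlier in the log, so only one request performs this download per image and datastore.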
[ 591.721928] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 591.758238] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3927c2c-81f5-4d50-ae4a-a3a3d63ebb3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.760378] env[68569]: DEBUG nova.compute.manager [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Received event network-changed-8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 591.760547] env[68569]: DEBUG nova.compute.manager [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Refreshing instance network info cache due to event network-changed-8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 591.760753] env[68569]: DEBUG oslo_concurrency.lockutils [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] Acquiring lock "refresh_cache-3ee3365b-0799-414b-b2a1-1d219bd9db96" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.760887] env[68569]: DEBUG oslo_concurrency.lockutils [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] Acquired lock "refresh_cache-3ee3365b-0799-414b-b2a1-1d219bd9db96" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.761068] env[68569]: DEBUG nova.network.neutron [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Refreshing network info cache for port 8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 591.771672] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a51e7c2-d2b5-4172-8f33-59c71de9de9d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.785815] env[68569]: DEBUG nova.compute.provider_tree [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.890470] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029219} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.890720] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 591.890927] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Moving file from [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142/cfcf6154-fe87-45d3-9aaf-2d3604c95629 to [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629. {{(pid=68569) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 591.891610] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-ba761af7-d9fc-4c62-96e3-057158f54f99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.898383] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 591.898383] env[68569]: value = "task-3166411" [ 591.898383] env[68569]: _type = "Task" [ 591.898383] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.906654] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166411, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.065392] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.065612] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.075257] env[68569]: DEBUG nova.compute.manager [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 592.076551] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edb0bd1-493e-490c-91ef-fcf8ad20e447 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.225814] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.234352] env[68569]: DEBUG nova.network.neutron [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.288750] env[68569]: DEBUG nova.scheduler.client.report [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.409144] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166411, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025037} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.410833] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] File moved {{(pid=68569) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 592.410833] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Cleaning up location [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 592.410833] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleting the datastore file [datastore1] vmware_temp/4ce906f5-12e8-4062-9d64-ee0c10224142 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 592.410833] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1737dc1-eeca-4067-8229-9232b328cd74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.416109] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 592.416109] env[68569]: value = "task-3166412" [ 592.416109] env[68569]: _type = "Task" [ 592.416109] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.424489] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166412, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.440955] env[68569]: DEBUG nova.compute.manager [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Received event network-vif-plugged-f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 592.441320] env[68569]: DEBUG oslo_concurrency.lockutils [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] Acquiring lock "d6c45731-d76a-46cf-9b7d-be035a200948-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.441981] env[68569]: DEBUG oslo_concurrency.lockutils [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] Lock "d6c45731-d76a-46cf-9b7d-be035a200948-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.442166] env[68569]: DEBUG oslo_concurrency.lockutils [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] Lock "d6c45731-d76a-46cf-9b7d-be035a200948-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.442958] env[68569]: DEBUG nova.compute.manager [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] No waiting events found dispatching network-vif-plugged-f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 592.442958] env[68569]: WARNING nova.compute.manager [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Received unexpected event network-vif-plugged-f82cdc68-b219-4ba1-8786-d0f6ca06da82 for instance with vm_state building and task_state spawning. [ 592.443159] env[68569]: DEBUG nova.compute.manager [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Received event network-changed-f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 592.443344] env[68569]: DEBUG nova.compute.manager [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Refreshing instance network info cache due to event network-changed-f82cdc68-b219-4ba1-8786-d0f6ca06da82. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 592.443443] env[68569]: DEBUG oslo_concurrency.lockutils [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] Acquiring lock "refresh_cache-d6c45731-d76a-46cf-9b7d-be035a200948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.443576] env[68569]: DEBUG oslo_concurrency.lockutils [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] Acquired lock "refresh_cache-d6c45731-d76a-46cf-9b7d-be035a200948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.443726] env[68569]: DEBUG nova.network.neutron [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Refreshing network info cache for port f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 592.462509] env[68569]: DEBUG nova.network.neutron [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Updating instance_info_cache with network_info: [{"id": "ae0d8af2-424e-4a56-8661-6162ef6ba924", "address": "fa:16:3e:69:89:03", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0d8af2-42", "ovs_interfaceid": "ae0d8af2-424e-4a56-8661-6162ef6ba924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.494768] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 592.524289] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 592.524555] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.524723] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 592.524906] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.525838] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 592.525838] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 592.525838] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 592.525838] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 592.526342] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 
tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 592.526342] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 592.526476] env[68569]: DEBUG nova.virt.hardware [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 592.528992] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59599c2-3437-4300-b4b7-972ba1a2744e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.535925] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c549b5-0ea3-4bb6-8fef-f3498ad05ebe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.587073] env[68569]: INFO nova.compute.manager [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] instance snapshotting [ 592.587690] env[68569]: DEBUG nova.objects.instance [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lazy-loading 'flavor' on Instance uuid 2c47ef2c-873a-4cb3-9a36-aa2155911b6e {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 592.759878] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.794328] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.794917] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 592.801781] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.614s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.803191] env[68569]: INFO nova.compute.claims [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.871477] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "ad207187-634f-4e7f-9809-eb3f742ddeec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.871477] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.929945] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024304} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.930215] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 592.935017] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e92974-4974-428f-a6e1-a98cf3d7e551 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.936785] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 592.936785] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52115510-c64f-9faa-e8ec-5355c2324862" [ 592.936785] env[68569]: _type = "Task" [ 592.936785] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.947762] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52115510-c64f-9faa-e8ec-5355c2324862, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.948737] env[68569]: DEBUG nova.network.neutron [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Updated VIF entry in instance network info cache for port 8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 592.949121] env[68569]: DEBUG nova.network.neutron [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Updating instance_info_cache with network_info: [{"id": "8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23", "address": "fa:16:3e:5e:d2:47", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8713f6a0-21", "ovs_interfaceid": "8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.966958] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Releasing lock "refresh_cache-87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.966958] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Instance network_info: |[{"id": "ae0d8af2-424e-4a56-8661-6162ef6ba924", "address": "fa:16:3e:69:89:03", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0d8af2-42", "ovs_interfaceid": "ae0d8af2-424e-4a56-8661-6162ef6ba924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 592.967321] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:89:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae0d8af2-424e-4a56-8661-6162ef6ba924', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 592.982329] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Creating folder: Project (94334816cb6c442c8a06f3bd8917655b). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 592.983698] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91e07b09-546b-4248-ad34-0b98ffba6d18 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.996225] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Created folder: Project (94334816cb6c442c8a06f3bd8917655b) in parent group-v633430. [ 592.996547] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Creating folder: Instances. Parent ref: group-v633446. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 592.996901] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-011d45f0-887a-40f8-9652-b7eb95932ef8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.006329] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Created folder: Instances in parent group-v633446. [ 593.006571] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 593.006783] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 593.007008] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b173a4da-ba44-4511-acef-a6c4a789cfb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.025964] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.025964] env[68569]: value = "task-3166415" [ 593.025964] env[68569]: _type = "Task" [ 593.025964] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.034634] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166415, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.093862] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32a9b2e-6c6a-409f-81e1-cfdafd5bc34f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.112758] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f879233d-dfe6-4dbf-b521-659831d94fef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.211514] env[68569]: DEBUG nova.network.neutron [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Updated VIF entry in instance network info cache for port f82cdc68-b219-4ba1-8786-d0f6ca06da82. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 593.212936] env[68569]: DEBUG nova.network.neutron [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Updating instance_info_cache with network_info: [{"id": "f82cdc68-b219-4ba1-8786-d0f6ca06da82", "address": "fa:16:3e:28:9d:36", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf82cdc68-b2", "ovs_interfaceid": "f82cdc68-b219-4ba1-8786-d0f6ca06da82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.313081] env[68569]: DEBUG nova.compute.utils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.315651] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 593.315651] env[68569]: DEBUG nova.network.neutron [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.378460] env[68569]: DEBUG nova.policy [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9aaa1f4a37be4a90b38d609fe0798f2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '020e9d06d0d148889f55316c2eed3c0a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 593.449611] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52115510-c64f-9faa-e8ec-5355c2324862, 'name': SearchDatastore_Task, 'duration_secs': 0.019768} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.449611] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.449611] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 3ee3365b-0799-414b-b2a1-1d219bd9db96/3ee3365b-0799-414b-b2a1-1d219bd9db96.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 593.449611] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 593.450036] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 593.450036] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38ae7230-f155-444f-85dc-df74e4a42eab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.452099] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fe2ff7c-bd40-4411-a28a-658a2cb67398 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.454538] env[68569]: DEBUG oslo_concurrency.lockutils [req-b7b43d07-1f32-4e7a-81fa-5c6f3d17b9f0 req-27a5cd4b-3fe2-4656-bb0f-9b8a49f00eab service nova] Releasing lock "refresh_cache-3ee3365b-0799-414b-b2a1-1d219bd9db96" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.459968] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 593.459968] env[68569]: value = "task-3166416" [ 593.459968] env[68569]: _type = "Task" [ 593.459968] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.465240] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 593.465658] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 593.467686] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3d218b4-f458-4631-815c-9953964abdef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.475021] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166416, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.477347] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 593.477347] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528604ed-8f69-1a27-0cc7-fa265b74eb36" [ 593.477347] env[68569]: _type = "Task" [ 593.477347] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.486538] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528604ed-8f69-1a27-0cc7-fa265b74eb36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.539354] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166415, 'name': CreateVM_Task, 'duration_secs': 0.405266} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.539511] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 593.540684] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.540855] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 593.541212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 593.541485] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2636450d-0aa3-497b-bc05-607e8c0b8349 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.547443] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 593.547443] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f505fb-5ff2-4dde-e733-28360a27ceb2" [ 593.547443] env[68569]: _type = "Task" [ 593.547443] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.556599] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f505fb-5ff2-4dde-e733-28360a27ceb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.625915] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 593.626942] env[68569]: DEBUG nova.network.neutron [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Successfully updated port: a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 593.628169] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6d8a74db-9520-4cb2-81a0-840703be1901 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.638227] env[68569]: DEBUG oslo_vmware.api [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 593.638227] env[68569]: value = "task-3166417" [ 593.638227] env[68569]: _type = "Task" [ 593.638227] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.650393] env[68569]: DEBUG oslo_vmware.api [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166417, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.715519] env[68569]: DEBUG oslo_concurrency.lockutils [req-3ec6aa3c-e314-482d-8e67-88ba27896f03 req-0379b7a2-3aa0-418b-b33f-33f674f64953 service nova] Releasing lock "refresh_cache-d6c45731-d76a-46cf-9b7d-be035a200948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.732425] env[68569]: DEBUG nova.network.neutron [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Successfully created port: 05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.736171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.736557] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.736907] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.737108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.738162] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.740792] env[68569]: INFO nova.compute.manager [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Terminating instance [ 593.817197] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 
tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 593.978416] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166416, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.990475] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528604ed-8f69-1a27-0cc7-fa265b74eb36, 'name': SearchDatastore_Task, 'duration_secs': 0.023519} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.991503] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb1bffa7-37c2-4321-8013-c456905b0581 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.999734] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 593.999734] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d23e8c-dbcf-4f3a-6a83-f0fbed3c2287" [ 593.999734] env[68569]: _type = "Task" [ 593.999734] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.014770] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d23e8c-dbcf-4f3a-6a83-f0fbed3c2287, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.062328] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f505fb-5ff2-4dde-e733-28360a27ceb2, 'name': SearchDatastore_Task, 'duration_secs': 0.011985} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.062819] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.062819] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 594.063086] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.133057] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2402a948-d7ff-4066-821c-f37beb1821d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.136797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.136797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.136920] env[68569]: DEBUG nova.network.neutron [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.153809] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c07db14-9515-42e6-8b32-bbd5adaea96f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.161947] env[68569]: DEBUG oslo_vmware.api [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166417, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.194457] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa900bd3-0626-4754-8842-9964dc743678 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.202871] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b68ce7-2c15-46dd-82c7-ff5b1aae9ec8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.217926] env[68569]: DEBUG nova.compute.provider_tree [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.245633] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "refresh_cache-2c47ef2c-873a-4cb3-9a36-aa2155911b6e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.245830] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquired lock "refresh_cache-2c47ef2c-873a-4cb3-9a36-aa2155911b6e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.246028] env[68569]: DEBUG nova.network.neutron [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.474311] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166416, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566414} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.474665] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 3ee3365b-0799-414b-b2a1-1d219bd9db96/3ee3365b-0799-414b-b2a1-1d219bd9db96.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 594.474883] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.479348] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0e9b04b-4daf-4458-b537-b5d874c62ffc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.484463] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 594.484463] env[68569]: value = "task-3166418" [ 594.484463] env[68569]: _type = "Task" [ 594.484463] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.495525] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.519613] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d23e8c-dbcf-4f3a-6a83-f0fbed3c2287, 'name': SearchDatastore_Task, 'duration_secs': 0.015868} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.519784] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.520922] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7693ef68-d7e5-4899-9615-9f2a1dd0bce8/7693ef68-d7e5-4899-9615-9f2a1dd0bce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 594.520922] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.520922] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 594.520922] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d80a2ba-a053-4b57-875d-b471800cb205 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.523754] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1a4b21e-b52c-48b8-b1f7-d0cc2708d090 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.530957] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 594.530957] env[68569]: value = "task-3166419" [ 594.530957] env[68569]: _type = "Task" [ 594.530957] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.535021] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 594.535219] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Folder [datastore1] devstack-image-cache_base created. 
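The interleaved 'Acquiring lock' / 'Lock ... acquired by ... :: waited' / '"released" ... :: held' and 'Acquired lock' / 'Releasing lock' entries above are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work on shared resources such as the cached image vmdk and the per-instance refresh caches. A minimal sketch of the two forms that appear in this log, with illustrative lock names rather than Nova's exact call sites:

    from oslo_concurrency import lockutils

    # Context-manager form: appears to produce the "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" lines tagged lockutils.py:313/316/334.
    with lockutils.lock('[datastore1] devstack-image-cache_base/<image>.vmdk'):
        pass  # fetch or copy the cached image while holding the lock

    # Decorator form: appears to produce the 'acquired by "..." :: waited' /
    # '"released" by "..." :: held' lines tagged lockutils.py:405/410/424.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # resource-tracker style critical section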
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 594.536472] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ac8b748-7f67-4f7f-be16-c40e99acaebb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.541735] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.544704] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 594.544704] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524d2cfe-1730-51f0-80c9-024f9f093762" [ 594.544704] env[68569]: _type = "Task" [ 594.544704] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.553287] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524d2cfe-1730-51f0-80c9-024f9f093762, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.655378] env[68569]: DEBUG oslo_vmware.api [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166417, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.712225] env[68569]: DEBUG nova.network.neutron [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.721498] env[68569]: DEBUG nova.scheduler.client.report [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 594.799068] env[68569]: DEBUG nova.network.neutron [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Instance cache missing network info. 
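For reference, Placement derives usable capacity from each inventory record as roughly (total − reserved) × allocation_ratio, with max_unit capping what a single allocation may request. For the inventory reported above that works out to about 48 × 4.0 = 192 schedulable VCPUs (at most 16 per instance), (196590 − 512) × 1.0 = 196078 MB of RAM, and 400 GB of disk (at most 129 GB per allocation).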
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.832607] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 594.865558] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 594.866244] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.866244] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 594.866244] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.866691] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 594.866691] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 594.866691] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 
tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 594.866809] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 594.866973] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 594.867198] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 594.867334] env[68569]: DEBUG nova.virt.hardware [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 594.868377] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad646e5c-f024-471f-92ee-3ff0228c26e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.878190] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d40d10-677e-4a78-abff-1ca5e0ae30f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.998303] env[68569]: DEBUG nova.network.neutron [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.003663] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166418, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08865} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.007020] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.007020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6828ec-d6f9-4179-b591-0e3ec45e71ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.039524] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 3ee3365b-0799-414b-b2a1-1d219bd9db96/3ee3365b-0799-414b-b2a1-1d219bd9db96.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.043273] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fcea25e-2a97-444c-a9a6-526e6e15cf5e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.073079] env[68569]: DEBUG nova.compute.manager [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Received event network-vif-plugged-ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 595.073079] env[68569]: DEBUG oslo_concurrency.lockutils [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] Acquiring lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.073079] env[68569]: DEBUG oslo_concurrency.lockutils [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.073079] env[68569]: DEBUG oslo_concurrency.lockutils [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.073079] env[68569]: DEBUG nova.compute.manager [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] No waiting events found dispatching network-vif-plugged-ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.073545] env[68569]: WARNING nova.compute.manager 
[req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Received unexpected event network-vif-plugged-ae0d8af2-424e-4a56-8661-6162ef6ba924 for instance with vm_state building and task_state spawning. [ 595.073545] env[68569]: DEBUG nova.compute.manager [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Received event network-changed-ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 595.073545] env[68569]: DEBUG nova.compute.manager [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Refreshing instance network info cache due to event network-changed-ae0d8af2-424e-4a56-8661-6162ef6ba924. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 595.073545] env[68569]: DEBUG oslo_concurrency.lockutils [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] Acquiring lock "refresh_cache-87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.073545] env[68569]: DEBUG oslo_concurrency.lockutils [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] Acquired lock "refresh_cache-87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.073695] env[68569]: DEBUG nova.network.neutron [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Refreshing network info cache for port ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.087241] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531349} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.087540] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 595.087540] env[68569]: value = "task-3166420" [ 595.087540] env[68569]: _type = "Task" [ 595.087540] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.089292] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7693ef68-d7e5-4899-9615-9f2a1dd0bce8/7693ef68-d7e5-4899-9615-9f2a1dd0bce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 595.089560] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 595.089828] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7cdb586-acfa-42f2-8486-87ad28572698 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.109229] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524d2cfe-1730-51f0-80c9-024f9f093762, 'name': SearchDatastore_Task, 'duration_secs': 0.026103} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.109570] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 595.109570] env[68569]: value = "task-3166421" [ 595.109570] env[68569]: _type = "Task" [ 595.109570] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.111359] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a951677d-ee85-4c86-b826-2633c6855bef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.120898] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 595.120898] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ba0e7f-dd7f-b966-9bed-035aabf9b2fa" [ 595.120898] env[68569]: _type = "Task" [ 595.120898] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.131769] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ba0e7f-dd7f-b966-9bed-035aabf9b2fa, 'name': SearchDatastore_Task} progress is 0%. 
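The recurring 'Waiting for the task: (returnval){ value = "task-..." }' entries, followed by 'progress is N%' and 'completed successfully', are the oslo.vmware session polling vCenter tasks until they finish. A minimal sketch of that pattern, using placeholder credentials and a hypothetical disk path rather than Nova's actual call sites:

    from oslo_vmware import api as vmware_api

    # host, username, password, api_retry_count, task_poll_interval
    # (all placeholders, not this deployment's values)
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'password', 10, 0.5)

    # *_Task methods return a Task managed-object reference; wait_for_task()
    # polls it (the "progress is N%" lines above) and raises if the task errors.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', vdm,
        name='[datastore1] example/example.vmdk',
        newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(task)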
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.133021] env[68569]: DEBUG nova.network.neutron [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Updating instance_info_cache with network_info: [{"id": "a41c265a-33e8-416e-bf15-87c720e60d25", "address": "fa:16:3e:d2:db:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa41c265a-33", "ovs_interfaceid": "a41c265a-33e8-416e-bf15-87c720e60d25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.153501] env[68569]: DEBUG oslo_vmware.api [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166417, 'name': CreateSnapshot_Task, 'duration_secs': 1.092345} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.153744] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 595.154580] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6fd859-1821-44ae-a9de-a409ccb983c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.227805] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.228578] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.235082] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.770s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.236345] env[68569]: INFO nova.compute.claims [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.505868] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Releasing lock "refresh_cache-2c47ef2c-873a-4cb3-9a36-aa2155911b6e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.506341] env[68569]: DEBUG nova.compute.manager [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 595.506880] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 595.507522] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fbff15-cd57-4fb2-b433-985e3a7182fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.516180] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 595.517837] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e22aa51f-570f-4ab4-a769-cf0debe43334 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.522940] env[68569]: DEBUG oslo_vmware.api [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 595.522940] env[68569]: value = "task-3166422" [ 595.522940] env[68569]: _type = "Task" [ 595.522940] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.534498] env[68569]: DEBUG oslo_vmware.api [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166422, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.604454] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166420, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.617377] env[68569]: DEBUG nova.network.neutron [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Successfully updated port: 05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 595.623584] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166421, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104201} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.624000] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.628419] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86c0f30-7d4f-4eb1-9026-ebdaf7c15bc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.650864] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.651189] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Instance network_info: |[{"id": "a41c265a-33e8-416e-bf15-87c720e60d25", "address": "fa:16:3e:d2:db:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa41c265a-33", "ovs_interfaceid": "a41c265a-33e8-416e-bf15-87c720e60d25", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 595.663611] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 7693ef68-d7e5-4899-9615-9f2a1dd0bce8/7693ef68-d7e5-4899-9615-9f2a1dd0bce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.664746] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ba0e7f-dd7f-b966-9bed-035aabf9b2fa, 'name': SearchDatastore_Task, 'duration_secs': 0.010923} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.667394] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:db:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a41c265a-33e8-416e-bf15-87c720e60d25', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.676054] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Creating folder: Project (fb7d044e2a2e4568b5c8c922b17a81ce). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.676385] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72634470-5502-4121-b592-486498de3a70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.695486] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.695486] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] d6c45731-d76a-46cf-9b7d-be035a200948/d6c45731-d76a-46cf-9b7d-be035a200948.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 595.695928] env[68569]: DEBUG nova.compute.manager [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Instance disappeared during snapshot {{(pid=68569) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 595.697810] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d75f1ea8-16b4-4335-a415-b662040ced0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.699379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.699583] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.700364] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd7fb787-da60-4725-81df-5cba007809f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.704501] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-700d7960-2047-48bf-a13c-9c74197c7a64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.708795] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 595.708795] env[68569]: value = "task-3166423" [ 
595.708795] env[68569]: _type = "Task" [ 595.708795] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.715315] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 595.715315] env[68569]: value = "task-3166425" [ 595.715315] env[68569]: _type = "Task" [ 595.715315] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.717682] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Created folder: Project (fb7d044e2a2e4568b5c8c922b17a81ce) in parent group-v633430. [ 595.717682] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Creating folder: Instances. Parent ref: group-v633450. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.720906] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d46cd39a-06d0-4d1e-aaa7-51ed1ae12912 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.728472] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166423, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.730304] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.730784] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 595.735220] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06f1be78-1f40-4acc-ae5c-67866d8d43ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.738357] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166425, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.744108] env[68569]: DEBUG nova.compute.utils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 595.748315] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Created folder: Instances in parent group-v633450. [ 595.748315] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 595.749189] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 595.749358] env[68569]: DEBUG nova.network.neutron [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 595.751053] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 595.753204] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfe465a2-642f-4bdb-8f09-a3673eb3219a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.768740] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 595.768740] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b41904-c9a4-a79f-27c7-1960a51104b5" [ 595.768740] env[68569]: _type = "Task" [ 595.768740] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.777693] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.777693] env[68569]: value = "task-3166427" [ 595.777693] env[68569]: _type = "Task" [ 595.777693] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.786744] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b41904-c9a4-a79f-27c7-1960a51104b5, 'name': SearchDatastore_Task, 'duration_secs': 0.009612} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.786744] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d58116-d9c9-4a52-a903-6f52d8f794e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.793121] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.796932] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 595.796932] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5267b4e5-94b8-c725-d004-678c9923309d" [ 595.796932] env[68569]: _type = "Task" [ 595.796932] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.806254] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5267b4e5-94b8-c725-d004-678c9923309d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.913600] env[68569]: DEBUG nova.compute.manager [None req-2a8c7403-60f0-46c1-aef4-8464e89bc0ec tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Found 0 images (rotation: 2) {{(pid=68569) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 595.962142] env[68569]: DEBUG nova.compute.manager [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Received event network-vif-plugged-a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 595.962142] env[68569]: DEBUG oslo_concurrency.lockutils [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] Acquiring lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.962142] env[68569]: DEBUG oslo_concurrency.lockutils [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.963860] env[68569]: DEBUG oslo_concurrency.lockutils [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.964224] env[68569]: DEBUG nova.compute.manager 
[req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] No waiting events found dispatching network-vif-plugged-a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.964576] env[68569]: WARNING nova.compute.manager [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Received unexpected event network-vif-plugged-a41c265a-33e8-416e-bf15-87c720e60d25 for instance with vm_state building and task_state spawning. [ 595.964863] env[68569]: DEBUG nova.compute.manager [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Received event network-changed-a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 595.965308] env[68569]: DEBUG nova.compute.manager [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Refreshing instance network info cache due to event network-changed-a41c265a-33e8-416e-bf15-87c720e60d25. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 595.965604] env[68569]: DEBUG oslo_concurrency.lockutils [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] Acquiring lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.965844] env[68569]: DEBUG oslo_concurrency.lockutils [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] Acquired lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.966113] env[68569]: DEBUG nova.network.neutron [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Refreshing network info cache for port a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.969630] env[68569]: DEBUG nova.policy [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5f1b28cf18542e49146a64a5393d632', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '615e876a259943af8f616848d91dc87e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.036027] env[68569]: DEBUG oslo_vmware.api [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166422, 'name': PowerOffVM_Task, 'duration_secs': 0.181392} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.036669] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 596.036669] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 596.036807] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a87b3414-d44e-4032-b824-fa646a6dcc2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.076105] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 596.076353] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 596.076556] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleting the datastore file [datastore2] 2c47ef2c-873a-4cb3-9a36-aa2155911b6e {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 596.077125] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cde798c-7c62-4b02-9b3f-1cec1e9f0058 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.084734] env[68569]: DEBUG oslo_vmware.api [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for the task: (returnval){ [ 596.084734] env[68569]: value = "task-3166429" [ 596.084734] env[68569]: _type = "Task" [ 596.084734] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.093070] env[68569]: DEBUG oslo_vmware.api [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166429, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.106511] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166420, 'name': ReconfigVM_Task, 'duration_secs': 0.757897} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.106511] env[68569]: DEBUG nova.network.neutron [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Updated VIF entry in instance network info cache for port ae0d8af2-424e-4a56-8661-6162ef6ba924. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.109836] env[68569]: DEBUG nova.network.neutron [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Updating instance_info_cache with network_info: [{"id": "ae0d8af2-424e-4a56-8661-6162ef6ba924", "address": "fa:16:3e:69:89:03", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0d8af2-42", "ovs_interfaceid": "ae0d8af2-424e-4a56-8661-6162ef6ba924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.111294] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 3ee3365b-0799-414b-b2a1-1d219bd9db96/3ee3365b-0799-414b-b2a1-1d219bd9db96.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.114707] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-752568c6-a1ea-42bb-bbc6-712aa7770ab6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.120140] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.120229] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquired lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.120379] env[68569]: DEBUG nova.network.neutron [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.122465] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 596.122465] env[68569]: value = "task-3166430" [ 596.122465] env[68569]: _type = "Task" [ 596.122465] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.131479] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166430, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.223431] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166423, 'name': ReconfigVM_Task, 'duration_secs': 0.486622} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.224098] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 7693ef68-d7e5-4899-9615-9f2a1dd0bce8/7693ef68-d7e5-4899-9615-9f2a1dd0bce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.224734] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8caa89b-266a-496e-b734-78db2a336f29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.229190] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166425, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.234928] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 596.234928] env[68569]: value = "task-3166431" [ 596.234928] env[68569]: _type = "Task" [ 596.234928] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.243129] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166431, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.249959] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 596.291811] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.309395] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5267b4e5-94b8-c725-d004-678c9923309d, 'name': SearchDatastore_Task, 'duration_secs': 0.011175} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.310157] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.310724] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a/87f6c6d0-4e3d-4608-bd0d-4771bc9c174a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.311387] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-009d7657-b0c6-4a9b-ac41-3a49c6be2bc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.322566] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 596.322566] env[68569]: value = "task-3166432" [ 596.322566] env[68569]: _type = "Task" [ 596.322566] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.337214] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166432, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.546577] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d32b128-234d-4203-adca-4a5645626fc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.558499] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c53f71-142f-44ea-8e31-be8564598ca2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.600992] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fd3a53-4f79-4fac-93c4-3a8128f3efdf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.615717] env[68569]: DEBUG oslo_concurrency.lockutils [req-c126c191-474c-4162-a653-d93a1cb1495c req-06756200-bb28-40ea-91c4-d3b6bc2b980d service nova] Releasing lock "refresh_cache-87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.615717] env[68569]: DEBUG oslo_vmware.api [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Task: {'id': task-3166429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211524} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.615717] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a16b4ef-05f9-441a-9e28-950c8a253c1d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.620291] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 596.620291] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 596.620291] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 596.620291] env[68569]: INFO nova.compute.manager [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 596.620291] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 596.620513] env[68569]: DEBUG nova.compute.manager [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 596.620513] env[68569]: DEBUG nova.network.neutron [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 596.634850] env[68569]: DEBUG nova.compute.provider_tree [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.647021] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166430, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.659432] env[68569]: DEBUG nova.network.neutron [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.671191] env[68569]: DEBUG nova.network.neutron [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Successfully created port: 0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.706558] env[68569]: DEBUG nova.network.neutron [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.727872] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166425, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.743266] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166431, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.791024] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.830983] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166432, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.831814] env[68569]: DEBUG nova.network.neutron [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Updated VIF entry in instance network info cache for port a41c265a-33e8-416e-bf15-87c720e60d25. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.832136] env[68569]: DEBUG nova.network.neutron [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Updating instance_info_cache with network_info: [{"id": "a41c265a-33e8-416e-bf15-87c720e60d25", "address": "fa:16:3e:d2:db:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa41c265a-33", "ovs_interfaceid": "a41c265a-33e8-416e-bf15-87c720e60d25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.962789] env[68569]: DEBUG nova.network.neutron [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updating instance_info_cache with network_info: [{"id": "05b63b18-ef8d-4346-992c-880e73eb22d9", "address": "fa:16:3e:b6:d6:de", "network": {"id": "880e856f-aa10-4ece-8287-93035c04f561", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-333722358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "020e9d06d0d148889f55316c2eed3c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", 
"segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05b63b18-ef", "ovs_interfaceid": "05b63b18-ef8d-4346-992c-880e73eb22d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.146556] env[68569]: DEBUG nova.scheduler.client.report [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.150452] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166430, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.162902] env[68569]: DEBUG nova.network.neutron [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.232103] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166425, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.246710] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166431, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.263144] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.292668] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.294872] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=<?>,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-26T04:48:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 597.295176] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.295370] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.295613] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.295718] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.295869] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 597.296097] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 597.296263] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 597.296436] env[68569]: DEBUG nova.virt.hardware [None 
req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 597.296620] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 597.296799] env[68569]: DEBUG nova.virt.hardware [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 597.297635] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84a1e57-adbd-4019-821c-bc2b68cac857 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.305465] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e68f15c-e294-4f56-9c5c-aafada71079c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.330913] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166432, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.334621] env[68569]: DEBUG oslo_concurrency.lockutils [req-506633a0-e4e8-4711-a326-11cdc6790f51 req-54caa6c6-2368-4742-aa0f-c414e968b994 service nova] Releasing lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.468874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Releasing lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.468874] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Instance network_info: |[{"id": "05b63b18-ef8d-4346-992c-880e73eb22d9", "address": "fa:16:3e:b6:d6:de", "network": {"id": "880e856f-aa10-4ece-8287-93035c04f561", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-333722358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"020e9d06d0d148889f55316c2eed3c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05b63b18-ef", "ovs_interfaceid": "05b63b18-ef8d-4346-992c-880e73eb22d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 597.469063] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:d6:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e272539-d425-489f-9a63-aba692e88933', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05b63b18-ef8d-4346-992c-880e73eb22d9', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 597.478010] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Creating folder: Project (020e9d06d0d148889f55316c2eed3c0a). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 597.478465] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f2d3397-be26-4ea0-88e8-2a82be1ffcac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.489479] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Created folder: Project (020e9d06d0d148889f55316c2eed3c0a) in parent group-v633430. [ 597.489920] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Creating folder: Instances. Parent ref: group-v633453. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 597.490264] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d280a34c-cf14-4667-b95e-50cdb1dc9189 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.499465] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Created folder: Instances in parent group-v633453. 
[ 597.499700] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 597.499999] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 597.500085] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c8f5597-b6d7-471d-ba09-2d25b40b57ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.519446] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 597.519446] env[68569]: value = "task-3166435" [ 597.519446] env[68569]: _type = "Task" [ 597.519446] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.528535] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166435, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.644239] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166430, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.653381] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.653882] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 597.657695] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.239s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.658010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.658212] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 597.658622] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.866s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.660408] env[68569]: INFO nova.compute.claims [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.665536] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933f8406-d065-411d-bae0-2064d225bac1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.670521] env[68569]: INFO nova.compute.manager [-] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Took 1.05 seconds to deallocate network for instance. 
[ 597.681947] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c3d312-8587-49ad-b5f9-c21d16041952 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.699062] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29f4bcb-2930-4f88-83fa-7841edf26622 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.709179] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3c79ae-6d4f-4450-b647-bbed99c5a688 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.748821] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181023MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 597.748989] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.763610] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166425, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.765852] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166431, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.793437] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.833894] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166432, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.028442] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "39a84212-2e52-4dba-b00c-5689564deaf4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.028689] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "39a84212-2e52-4dba-b00c-5689564deaf4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.037347] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166435, 'name': CreateVM_Task, 'duration_secs': 0.431619} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.037555] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 598.038562] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.038689] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.038960] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 598.039242] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-410f3ea1-2621-47da-8679-f1b784ac4a63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.044874] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 598.044874] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5275f87b-de36-6d5c-9190-268460c465d1" [ 598.044874] env[68569]: _type = 
"Task" [ 598.044874] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.055132] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5275f87b-de36-6d5c-9190-268460c465d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.146465] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166430, 'name': Rename_Task, 'duration_secs': 1.731508} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.146562] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 598.146860] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72b67ea0-afbb-4292-ac59-b3c95a01d768 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.154191] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 598.154191] env[68569]: value = "task-3166436" [ 598.154191] env[68569]: _type = "Task" [ 598.154191] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.163938] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.166414] env[68569]: DEBUG nova.compute.utils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.167857] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.168262] env[68569]: DEBUG nova.network.neutron [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.182363] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.194540] env[68569]: DEBUG nova.compute.manager [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Received event network-vif-plugged-05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 598.197735] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] Acquiring lock "7129a57f-e639-49ae-96a9-3c1d966034a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.197735] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.197735] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.197735] env[68569]: DEBUG nova.compute.manager [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] No waiting events found dispatching network-vif-plugged-05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 598.197735] env[68569]: WARNING nova.compute.manager [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Received unexpected event network-vif-plugged-05b63b18-ef8d-4346-992c-880e73eb22d9 for instance with vm_state building and task_state spawning. 
[ 598.198165] env[68569]: DEBUG nova.compute.manager [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Received event network-changed-05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 598.198165] env[68569]: DEBUG nova.compute.manager [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Refreshing instance network info cache due to event network-changed-05b63b18-ef8d-4346-992c-880e73eb22d9. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 598.198165] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] Acquiring lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.198165] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] Acquired lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.198165] env[68569]: DEBUG nova.network.neutron [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Refreshing network info cache for port 05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.243968] env[68569]: DEBUG nova.policy [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11b0937b7fda41eca4a7fda498607824', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc479c8ce67f4cbebb03cc3df42c900d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.261465] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166431, 'name': Rename_Task, 'duration_secs': 1.56158} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.264827] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 598.265292] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166425, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.115722} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.265292] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f904199-f793-4006-9ef2-d56fc3e4577e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.266816] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] d6c45731-d76a-46cf-9b7d-be035a200948/d6c45731-d76a-46cf-9b7d-be035a200948.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.267044] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.267619] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c331f4f-52c3-45f7-8c62-8f6d48343bb3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.275170] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 598.275170] env[68569]: value = "task-3166438" [ 598.275170] env[68569]: _type = "Task" [ 598.275170] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.276393] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 598.276393] env[68569]: value = "task-3166437" [ 598.276393] env[68569]: _type = "Task" [ 598.276393] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.292457] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166438, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.296112] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166437, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.300247] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.334911] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166432, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.859907} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.335735] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a/87f6c6d0-4e3d-4608-bd0d-4771bc9c174a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.336209] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.336470] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80f85afa-d6dd-46d3-b03d-073bfcf77c83 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.342645] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 598.342645] env[68569]: value = "task-3166439" [ 598.342645] env[68569]: _type = "Task" [ 598.342645] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.350574] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.559424] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5275f87b-de36-6d5c-9190-268460c465d1, 'name': SearchDatastore_Task, 'duration_secs': 0.014692} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.559646] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.559917] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.560245] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.560289] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.560888] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.560888] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64d416af-1db6-45be-81f9-f9dfb733bc95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.575400] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 598.575400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 598.575635] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2697014-89b5-48f2-86c2-455efc1adace {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.584295] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 598.584295] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5228336a-2539-9ed3-a7d8-6717c8bb0ff6" [ 598.584295] env[68569]: _type = "Task" [ 598.584295] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.597566] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5228336a-2539-9ed3-a7d8-6717c8bb0ff6, 'name': SearchDatastore_Task, 'duration_secs': 0.010938} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.598429] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8fea4ad-1396-4c7c-99c9-969f1ff50bd1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.604524] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 598.604524] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52185f14-f096-7fb3-7cb7-a048e7f334af" [ 598.604524] env[68569]: _type = "Task" [ 598.604524] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.616904] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52185f14-f096-7fb3-7cb7-a048e7f334af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.651717] env[68569]: DEBUG nova.network.neutron [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Successfully updated port: 0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 598.666205] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166436, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.669684] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 598.804592] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166438, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150743} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.808188] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.808631] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166437, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.811914] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2c43a5-c967-4ec4-b913-68d1405f85e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.814424] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166427, 'name': CreateVM_Task, 'duration_secs': 3.029083} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.814662] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 598.815598] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.815764] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.816073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 598.818942] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06856ab4-1567-43f7-aaf1-e32cb5225446 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.847615] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] d6c45731-d76a-46cf-9b7d-be035a200948/d6c45731-d76a-46cf-9b7d-be035a200948.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.851447] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac7c31de-9c28-47fd-8173-613848e760d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.874337] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 598.874337] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b7c715-dba9-6843-092f-dfcb90446871" [ 598.874337] env[68569]: _type = "Task" [ 598.874337] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.882067] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090248} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.884072] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.884524] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 598.884524] env[68569]: value = "task-3166440" [ 598.884524] env[68569]: _type = "Task" [ 598.884524] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.887778] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b7bc59-b09e-49ae-a2d1-116f03260dd3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.899779] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b7c715-dba9-6843-092f-dfcb90446871, 'name': SearchDatastore_Task, 'duration_secs': 0.011554} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.900716] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.901671] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.901671] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.901671] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.901671] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.902664] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d9adf80-c00c-4b1c-b18a-cf6c0012cd39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.924832] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a/87f6c6d0-4e3d-4608-bd0d-4771bc9c174a.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.925597] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.928383] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dc090d1-f29d-47ea-9ad6-641b3a0c87ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.946044] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 598.946271] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 598.947595] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ed0518c-87ce-47b5-87a5-164cdb4add85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.951365] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 598.951365] env[68569]: value = "task-3166441" [ 598.951365] env[68569]: _type = "Task" [ 598.951365] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.952669] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 598.952669] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52721fad-9cbb-7d39-6fa2-b16f126581f3" [ 598.952669] env[68569]: _type = "Task" [ 598.952669] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.961199] env[68569]: DEBUG nova.compute.manager [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Received event network-changed-50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 598.961481] env[68569]: DEBUG nova.compute.manager [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Refreshing instance network info cache due to event network-changed-50f8883e-b914-4589-ac89-c1ccfb5a91de. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 598.961621] env[68569]: DEBUG oslo_concurrency.lockutils [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] Acquiring lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.961779] env[68569]: DEBUG oslo_concurrency.lockutils [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] Acquired lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.961942] env[68569]: DEBUG nova.network.neutron [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Refreshing network info cache for port 50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.976920] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166441, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.982533] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52721fad-9cbb-7d39-6fa2-b16f126581f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010807} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.983216] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-585d2193-484d-4698-b373-5c57cabc6ff6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.992711] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 598.992711] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef615a-6bd0-5f56-666a-e053d15b7e69" [ 598.992711] env[68569]: _type = "Task" [ 598.992711] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.002032] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef615a-6bd0-5f56-666a-e053d15b7e69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.067871] env[68569]: DEBUG nova.network.neutron [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Successfully created port: bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.095736] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9517618-4afc-4a05-a870-81499fcd3a83 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.104274] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42508081-ea49-4eb9-82db-15269daa0945 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.119463] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52185f14-f096-7fb3-7cb7-a048e7f334af, 'name': SearchDatastore_Task, 'duration_secs': 0.012083} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.141895] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.142344] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 7129a57f-e639-49ae-96a9-3c1d966034a8/7129a57f-e639-49ae-96a9-3c1d966034a8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 599.142754] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb7a3059-0b35-4898-a3de-910afd518274 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.145430] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db00ecb2-0d6d-4dd2-a788-5aa73fec88a5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.154236] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "refresh_cache-8eb18d79-e164-4e66-83b0-7b40d04c30a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.154361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquired lock "refresh_cache-8eb18d79-e164-4e66-83b0-7b40d04c30a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.154556] env[68569]: DEBUG nova.network.neutron [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.162188] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c373d2-b3bd-48ae-bc35-88d76d5db107 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.166934] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 599.166934] env[68569]: value = "task-3166442" [ 599.166934] env[68569]: _type = "Task" [ 599.166934] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.199480] env[68569]: DEBUG nova.compute.provider_tree [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.207737] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166436, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.213186] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.298032] env[68569]: DEBUG oslo_vmware.api [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166437, 'name': PowerOnVM_Task, 'duration_secs': 0.766122} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.298032] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 599.298032] env[68569]: INFO nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Took 11.70 seconds to spawn the instance on the hypervisor. [ 599.298032] env[68569]: DEBUG nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 599.298774] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f39ef1-941a-48c4-a724-90dfa59ea052 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.404797] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166440, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.463509] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166441, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.503469] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef615a-6bd0-5f56-666a-e053d15b7e69, 'name': SearchDatastore_Task, 'duration_secs': 0.01735} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.503838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.504228] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 925400c4-3b33-4f4a-9f63-3ceec06cf0b7/925400c4-3b33-4f4a-9f63-3ceec06cf0b7.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 599.504538] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6336dcd4-40ef-43b8-9d7e-146937172455 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.514882] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 599.514882] env[68569]: value = "task-3166443" [ 599.514882] env[68569]: _type = "Task" [ 599.514882] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.526326] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.649315] env[68569]: DEBUG nova.network.neutron [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updated VIF entry in instance network info cache for port 05b63b18-ef8d-4346-992c-880e73eb22d9. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 599.649680] env[68569]: DEBUG nova.network.neutron [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updating instance_info_cache with network_info: [{"id": "05b63b18-ef8d-4346-992c-880e73eb22d9", "address": "fa:16:3e:b6:d6:de", "network": {"id": "880e856f-aa10-4ece-8287-93035c04f561", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-333722358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "020e9d06d0d148889f55316c2eed3c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05b63b18-ef", "ovs_interfaceid": "05b63b18-ef8d-4346-992c-880e73eb22d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.681401] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166436, 'name': PowerOnVM_Task} progress is 64%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.691073] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166442, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.710019] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 599.711624] env[68569]: DEBUG nova.scheduler.client.report [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 599.742706] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 599.743033] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.743245] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 599.743475] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.743660] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 599.743877] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 599.744103] env[68569]: DEBUG 
nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 599.744459] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 599.744459] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 599.744696] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 599.744908] env[68569]: DEBUG nova.virt.hardware [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 599.746044] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113eff10-f72c-4926-a44d-007032d56238 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.757945] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f1fb15-b573-4ba8-8b29-fdc0ef9cfd2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.802545] env[68569]: DEBUG nova.network.neutron [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.821110] env[68569]: INFO nova.compute.manager [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Took 22.59 seconds to build instance. [ 599.905477] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166440, 'name': ReconfigVM_Task, 'duration_secs': 0.561234} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.905862] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Reconfigured VM instance instance-00000004 to attach disk [datastore1] d6c45731-d76a-46cf-9b7d-be035a200948/d6c45731-d76a-46cf-9b7d-be035a200948.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 599.906436] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d9668a4-8b32-40c1-902c-02eef73f7ec1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.913701] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 599.913701] env[68569]: value = "task-3166444" [ 599.913701] env[68569]: _type = "Task" [ 599.913701] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.924438] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166444, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.966128] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166441, 'name': ReconfigVM_Task, 'duration_secs': 0.540061} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.966539] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a/87f6c6d0-4e3d-4608-bd0d-4771bc9c174a.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 599.967261] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c28b3895-b3e4-43cc-9ea9-3a0a8544c69c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.976230] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 599.976230] env[68569]: value = "task-3166445" [ 599.976230] env[68569]: _type = "Task" [ 599.976230] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.988740] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166445, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.025982] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166443, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.153390] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c1c67ad-fda3-4fd1-92db-6c5a121d631e req-c18c97a0-372d-4840-b7fa-1b5aac39afd4 service nova] Releasing lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.174920] env[68569]: DEBUG oslo_vmware.api [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166436, 'name': PowerOnVM_Task, 'duration_secs': 1.83558} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.179062] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.179062] env[68569]: INFO nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Took 17.36 seconds to spawn the instance on the hypervisor. [ 600.179062] env[68569]: DEBUG nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.180223] env[68569]: DEBUG nova.network.neutron [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updated VIF entry in instance network info cache for port 50f8883e-b914-4589-ac89-c1ccfb5a91de. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 600.180311] env[68569]: DEBUG nova.network.neutron [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updating instance_info_cache with network_info: [{"id": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "address": "fa:16:3e:b3:b1:43", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f8883e-b9", "ovs_interfaceid": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.183158] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1791ed78-298f-4e94-9c30-8c76f4a5b46d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.197519] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593165} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.199952] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 7129a57f-e639-49ae-96a9-3c1d966034a8/7129a57f-e639-49ae-96a9-3c1d966034a8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.199952] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 600.204805] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37dd3609-ad93-4edb-953e-dd64e63e9bb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.213349] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 600.213349] env[68569]: value = "task-3166446" [ 600.213349] env[68569]: _type = "Task" [ 600.213349] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.222264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.222764] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 600.226818] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.467s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.228473] env[68569]: INFO nova.compute.claims [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.237883] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166446, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.294525] env[68569]: DEBUG nova.network.neutron [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Updating instance_info_cache with network_info: [{"id": "0145d676-3585-4d48-ac88-bca4be5f193a", "address": "fa:16:3e:a4:e8:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0145d676-35", "ovs_interfaceid": "0145d676-3585-4d48-ac88-bca4be5f193a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.323451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0fa819e5-811f-449d-a12c-c2711c98b03f tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.097s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.325401] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.421s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.325480] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] During sync_power_state the instance has a pending task (spawning). Skip. [ 600.325736] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.424684] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166444, 'name': Rename_Task, 'duration_secs': 0.238087} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.424999] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.425256] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5833d993-e1c7-4bef-ab22-f1a22435e776 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.431632] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 600.431632] env[68569]: value = "task-3166447" [ 600.431632] env[68569]: _type = "Task" [ 600.431632] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.439367] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166447, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.485342] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166445, 'name': Rename_Task, 'duration_secs': 0.234348} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.485658] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.485905] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de5e008c-33c5-4601-9bfa-4f552fe2708c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.492868] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 600.492868] env[68569]: value = "task-3166448" [ 600.492868] env[68569]: _type = "Task" [ 600.492868] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.502354] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.527530] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687468} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.527999] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 925400c4-3b33-4f4a-9f63-3ceec06cf0b7/925400c4-3b33-4f4a-9f63-3ceec06cf0b7.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.528105] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 600.528349] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12d75175-cc01-4306-8de3-b9f912cb206c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.534236] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 600.534236] env[68569]: value = "task-3166449" [ 600.534236] env[68569]: _type = "Task" [ 600.534236] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.542401] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166449, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.688250] env[68569]: DEBUG oslo_concurrency.lockutils [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] Releasing lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.688250] env[68569]: DEBUG nova.compute.manager [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Received event network-changed-50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 600.689443] env[68569]: DEBUG nova.compute.manager [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Refreshing instance network info cache due to event network-changed-50f8883e-b914-4589-ac89-c1ccfb5a91de. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 600.689443] env[68569]: DEBUG oslo_concurrency.lockutils [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] Acquiring lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.689443] env[68569]: DEBUG oslo_concurrency.lockutils [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] Acquired lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.689443] env[68569]: DEBUG nova.network.neutron [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Refreshing network info cache for port 50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 600.713578] env[68569]: INFO nova.compute.manager [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Took 24.43 seconds to build instance. [ 600.723752] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.198947} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.724052] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 600.724872] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8db23f-28b2-403c-8af0-6c921f2a3d04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.747021] env[68569]: DEBUG nova.compute.utils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 600.753909] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 7129a57f-e639-49ae-96a9-3c1d966034a8/7129a57f-e639-49ae-96a9-3c1d966034a8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 600.754964] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 600.755291] env[68569]: DEBUG nova.network.neutron [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 600.756904] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0748858b-76bc-4292-96a8-3bb0423a5ef0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.780346] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 600.780346] env[68569]: value = "task-3166450" [ 600.780346] env[68569]: _type = "Task" [ 600.780346] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.791690] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166450, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.796695] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Releasing lock "refresh_cache-8eb18d79-e164-4e66-83b0-7b40d04c30a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.797046] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Instance network_info: |[{"id": "0145d676-3585-4d48-ac88-bca4be5f193a", "address": "fa:16:3e:a4:e8:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0145d676-35", "ovs_interfaceid": "0145d676-3585-4d48-ac88-bca4be5f193a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 600.797437] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:e8:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0145d676-3585-4d48-ac88-bca4be5f193a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 600.805210] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Creating folder: Project (615e876a259943af8f616848d91dc87e). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 600.805494] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bade9bd6-8842-41f3-b6a5-ad275a504b8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.814708] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Created folder: Project (615e876a259943af8f616848d91dc87e) in parent group-v633430. 
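Note on the repeated "Waiting for the task ... progress is N% ... completed successfully" records above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task): they are produced by a poll loop over vCenter task state. The sketch below is illustrative only and is not the oslo.vmware implementation; get_task_info is a hypothetical callable standing in for a real PropertyCollector/Task query.

# Illustrative sketch only, in the spirit of the poll records above.
# `get_task_info` is a hypothetical callable, not an oslo.vmware API.
import time


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # e.g. {"state": "running", "progress": 37}
        state = info.get("state")
        if state == "success":
            return info.get("result")
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")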
[ 600.814918] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Creating folder: Instances. Parent ref: group-v633456. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 600.815185] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c8ba0e0-9fbd-4313-8984-b3fa3d9efbfd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.823670] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Created folder: Instances in parent group-v633456. [ 600.823826] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 600.824043] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 600.824254] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df1c2e59-97fb-48d4-b409-2c78baca58e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.839580] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 600.846513] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 600.846513] env[68569]: value = "task-3166453" [ 600.846513] env[68569]: _type = "Task" [ 600.846513] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.855240] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166453, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.941561] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166447, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.002941] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166448, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.046380] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166449, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.054298] env[68569]: DEBUG nova.policy [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ef3f34740c64e4490836ff2bf07b568', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95ddfacc73184dce804752ef6c9fa79f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.218962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00d86aa1-4ceb-48ea-8db5-d994e3dffce7 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.939s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.220499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.317s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.220499] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] During sync_power_state the instance has a pending task (spawning). Skip. [ 601.220908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.260750] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 601.297328] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166450, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.361015] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.366700] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166453, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.445976] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166447, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.508819] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166448, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.545588] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.970446} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.549115] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 601.550543] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f00795-d5f4-4a51-8b61-7b874fe6c590 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.576117] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 925400c4-3b33-4f4a-9f63-3ceec06cf0b7/925400c4-3b33-4f4a-9f63-3ceec06cf0b7.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 601.577181] env[68569]: DEBUG nova.network.neutron [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updated VIF entry in instance network info cache for port 50f8883e-b914-4589-ac89-c1ccfb5a91de. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 601.577513] env[68569]: DEBUG nova.network.neutron [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updating instance_info_cache with network_info: [{"id": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "address": "fa:16:3e:b3:b1:43", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f8883e-b9", "ovs_interfaceid": "50f8883e-b914-4589-ac89-c1ccfb5a91de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.579848] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-399efb9d-139f-4377-9169-445c402fc745 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.595285] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcb4969-a62f-4832-8183-8f6f2cfe70ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.606853] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f0511f-1aae-4f20-bca4-dd7c710d00dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.611932] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 601.611932] env[68569]: value = "task-3166454" [ 601.611932] env[68569]: _type = "Task" [ 601.611932] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.643929] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2929f7dc-8bb1-4464-8d33-d17f9f5f2348 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.650028] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166454, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.655304] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1880879-70bb-4730-9b0e-df4ea4a89e93 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.669979] env[68569]: DEBUG nova.compute.provider_tree [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.722609] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 601.793173] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166450, 'name': ReconfigVM_Task, 'duration_secs': 0.804457} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.793485] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 7129a57f-e639-49ae-96a9-3c1d966034a8/7129a57f-e639-49ae-96a9-3c1d966034a8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 601.794220] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14c8835f-ebc4-49db-9784-9a060d555827 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.802401] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 601.802401] env[68569]: value = "task-3166455" [ 601.802401] env[68569]: _type = "Task" [ 601.802401] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.814626] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166455, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.861380] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166453, 'name': CreateVM_Task, 'duration_secs': 1.004444} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.861642] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 601.862433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.862616] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.863028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 601.863298] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63fe704e-2dc9-479c-9efc-ae7b00fd8aa7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.869134] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 601.869134] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52537e13-2142-4313-cd64-945de787f926" [ 601.869134] env[68569]: _type = "Task" [ 601.869134] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.880957] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52537e13-2142-4313-cd64-945de787f926, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.952028] env[68569]: DEBUG oslo_vmware.api [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166447, 'name': PowerOnVM_Task, 'duration_secs': 1.229124} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.952028] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 601.952028] env[68569]: INFO nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Took 16.75 seconds to spawn the instance on the hypervisor. [ 601.952028] env[68569]: DEBUG nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 601.952028] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3396d45-2462-4971-9953-49c92fcb1696 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.006719] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166448, 'name': PowerOnVM_Task} progress is 37%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.100443] env[68569]: DEBUG oslo_concurrency.lockutils [req-873410a3-660e-44b3-b093-61ec96cf9335 req-db4baae4-054b-4613-9339-e13849cbf19d service nova] Releasing lock "refresh_cache-5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.124471] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166454, 'name': ReconfigVM_Task, 'duration_secs': 0.445556} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.125202] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 925400c4-3b33-4f4a-9f63-3ceec06cf0b7/925400c4-3b33-4f4a-9f63-3ceec06cf0b7.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 602.126075] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2536255-ef8c-4d2a-b896-fa8991b6373b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.132924] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 602.132924] env[68569]: value = "task-3166456" [ 602.132924] env[68569]: _type = "Task" [ 602.132924] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.147286] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166456, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.173537] env[68569]: DEBUG nova.scheduler.client.report [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.251417] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.261476] env[68569]: DEBUG nova.network.neutron [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Successfully created port: 4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.279522] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 602.312024] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 602.312024] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.312220] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 602.312220] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.312766] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 602.312766] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 602.312877] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 602.313045] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 602.313136] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 
tempest-ServerTagsTestJSON-795348598-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 602.313290] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 602.313453] env[68569]: DEBUG nova.virt.hardware [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 602.314294] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6bcf94-4460-4f51-8653-598fc3a91616 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.322245] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166455, 'name': Rename_Task, 'duration_secs': 0.21225} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.324389] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.324701] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-164d7405-c1bd-4927-a2ee-538e517b6f3b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.327269] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c182df-0925-4260-ad67-fbf70fcb50d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.346783] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 602.346783] env[68569]: value = "task-3166457" [ 602.346783] env[68569]: _type = "Task" [ 602.346783] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.358023] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166457, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.379227] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52537e13-2142-4313-cd64-945de787f926, 'name': SearchDatastore_Task, 'duration_secs': 0.011704} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.379538] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.379775] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.380029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.380179] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.380352] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.380606] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-739b4aba-5921-4c1f-ac5a-ded8b9adc37d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.389413] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.389606] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.391016] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33df6158-be9d-45ad-b5af-f44bc857b8c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.397422] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 602.397422] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b88ae5-dad8-b7ab-8c0a-6baae1b01658" [ 602.397422] env[68569]: _type = "Task" [ 602.397422] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.414249] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b88ae5-dad8-b7ab-8c0a-6baae1b01658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.470277] env[68569]: INFO nova.compute.manager [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Took 25.30 seconds to build instance. [ 602.510530] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166448, 'name': PowerOnVM_Task} progress is 82%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.645665] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166456, 'name': Rename_Task, 'duration_secs': 0.194325} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.645946] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.648037] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea48686d-0e7f-4a37-ba67-32df4a99c3c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.653987] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 602.653987] env[68569]: value = "task-3166458" [ 602.653987] env[68569]: _type = "Task" [ 602.653987] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.662463] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166458, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.678570] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.679413] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 602.682602] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.934s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.734215] env[68569]: DEBUG nova.network.neutron [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Successfully updated port: bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 602.862042] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166457, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.890705] env[68569]: DEBUG nova.compute.manager [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Received event network-vif-plugged-0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 602.890705] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] Acquiring lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.890875] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.891119] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.891226] env[68569]: DEBUG nova.compute.manager [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] No waiting events found dispatching network-vif-plugged-0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 602.891387] env[68569]: WARNING nova.compute.manager [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Received unexpected event network-vif-plugged-0145d676-3585-4d48-ac88-bca4be5f193a for instance with vm_state building and task_state spawning. [ 602.891574] env[68569]: DEBUG nova.compute.manager [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Received event network-changed-0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 602.891742] env[68569]: DEBUG nova.compute.manager [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Refreshing instance network info cache due to event network-changed-0145d676-3585-4d48-ac88-bca4be5f193a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 602.891924] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] Acquiring lock "refresh_cache-8eb18d79-e164-4e66-83b0-7b40d04c30a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.892066] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] Acquired lock "refresh_cache-8eb18d79-e164-4e66-83b0-7b40d04c30a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.896019] env[68569]: DEBUG nova.network.neutron [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Refreshing network info cache for port 0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 602.909375] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b88ae5-dad8-b7ab-8c0a-6baae1b01658, 'name': SearchDatastore_Task, 'duration_secs': 0.016175} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.911082] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33354a1c-e0b9-4856-9317-264080dc19d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.918274] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 602.918274] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215a790-2335-999a-d7da-306e009b144e" [ 602.918274] env[68569]: _type = "Task" [ 602.918274] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.929140] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215a790-2335-999a-d7da-306e009b144e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.974063] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6e87c620-0c31-4bc4-91e3-f5414952b270 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "d6c45731-d76a-46cf-9b7d-be035a200948" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.814s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.975490] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "d6c45731-d76a-46cf-9b7d-be035a200948" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.072s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.976770] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] During sync_power_state the instance has a pending task (spawning). Skip. [ 602.976770] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "d6c45731-d76a-46cf-9b7d-be035a200948" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.007421] env[68569]: DEBUG oslo_vmware.api [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166448, 'name': PowerOnVM_Task, 'duration_secs': 2.16821} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.007807] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.008094] env[68569]: INFO nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Took 13.01 seconds to spawn the instance on the hypervisor. 
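The SearchDatastore_Task / PowerOnVM_Task / CopyVirtualDisk_Task entries above are produced by oslo_vmware's wait_for_task, which polls each vCenter task and logs a progress percentage until the task reports completion together with a duration_secs. As an illustration only, the following stand-alone sketch shows that poll-until-done pattern in plain Python; get_task_state is a hypothetical callable standing in for the driver's real PropertyCollector lookup, so this is a simplified stand-in rather than oslo.vmware's implementation.

```python
import time

# Hypothetical task states, mirroring the vSphere TaskInfo states seen in the log.
RUNNING, SUCCESS, ERROR = "running", "success", "error"


def wait_for_task(get_task_state, poll_interval=0.5, log=print):
    """Poll a task until it finishes, logging progress like the entries above.

    get_task_state is a stand-in callable returning (state, progress_pct);
    the real driver reads this state via a vSphere PropertyCollector.
    """
    started = time.monotonic()
    while True:
        state, progress = get_task_state()
        if state == RUNNING:
            log(f"Task progress is {progress}%.")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - started
        if state == SUCCESS:
            log(f"Task completed successfully (duration_secs={duration:.6f}).")
            return duration
        raise RuntimeError(f"Task failed after {duration:.3f}s")


if __name__ == "__main__":
    # Fake task that "runs" for three polls, then succeeds.
    polls = iter([(RUNNING, 0), (RUNNING, 37), (RUNNING, 88), (SUCCESS, 100)])
    wait_for_task(lambda: next(polls), poll_interval=0.01)
```

The real driver additionally translates task errors into driver-specific exceptions; only the polling/progress-logging shape is reproduced here.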
[ 603.008320] env[68569]: DEBUG nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.009469] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc644ab-97d4-4dc9-a2d8-033db79d5573 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.166642] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166458, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.187513] env[68569]: DEBUG nova.compute.utils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 603.198220] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 603.198220] env[68569]: DEBUG nova.network.neutron [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.239311] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.239487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquired lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.239640] env[68569]: DEBUG nova.network.neutron [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.306376] env[68569]: DEBUG nova.compute.manager [None req-4ba41043-b68a-4e4c-a787-e1c83a6fcea3 tempest-ServerDiagnosticsV248Test-225293956 tempest-ServerDiagnosticsV248Test-225293956-project-admin] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.308476] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bd85ea-59ab-4d42-9577-1e09a728525b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.320416] env[68569]: INFO nova.compute.manager [None req-4ba41043-b68a-4e4c-a787-e1c83a6fcea3 tempest-ServerDiagnosticsV248Test-225293956 tempest-ServerDiagnosticsV248Test-225293956-project-admin] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Retrieving diagnostics [ 603.321260] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14260f9f-da3d-4d8b-85cf-a8024ee84618 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.329024] env[68569]: DEBUG nova.policy [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0fc0aab70b841d991610ba829cc6660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d628075adbb43b8a572072277c25741', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 603.373908] env[68569]: DEBUG oslo_vmware.api [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166457, 'name': PowerOnVM_Task, 'duration_secs': 0.994473} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.374188] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.374382] env[68569]: INFO nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Took 8.54 seconds to spawn the instance on the hypervisor. 
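Records such as "Took 25.30 seconds to build instance", "Took 13.01 seconds to spawn the instance on the hypervisor", and the lock "held N.NNNs" lines are the most useful timing signals in a run like this. Below is a rough, stdlib-only sketch for scraping them out of a captured log; the file name is hypothetical and the regexes are keyed to the exact phrasing appearing in this log, so they may need adjusting for other Nova versions.

```python
import re
import sys
from collections import defaultdict

# Patterns match the INFO/DEBUG phrasing used by nova-compute in this log.
SPAWN_RE = re.compile(
    r"\[instance: (?P<uuid>[0-9a-f-]{36})\] "
    r"Took (?P<secs>[\d.]+) seconds to "
    r"(?P<what>spawn the instance on the hypervisor|build instance)"
)
LOCK_RE = re.compile(
    r'Lock "(?P<name>[^"]+)" "released" by "(?P<owner>[^"]+)" :: held (?P<secs>[\d.]+)s'
)


def summarize(lines):
    """Collect per-instance build/spawn times and lock hold durations."""
    timings = defaultdict(dict)
    lock_holds = []
    for line in lines:
        m = SPAWN_RE.search(line)
        if m:
            key = "spawn" if m.group("what").startswith("spawn") else "build"
            timings[m.group("uuid")][key] = float(m.group("secs"))
        m = LOCK_RE.search(line)
        if m:
            lock_holds.append((float(m.group("secs")), m.group("name"), m.group("owner")))
    return timings, sorted(lock_holds, reverse=True)


if __name__ == "__main__":
    # Usage: python summarize_log.py nova-compute.log  (file name is hypothetical)
    with open(sys.argv[1]) as f:
        timings, locks = summarize(f)
    for uuid, t in timings.items():
        print(uuid, t)
    for secs, name, owner in locks[:5]:
        print(f"{secs:8.3f}s  {name}  ({owner})")
```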
[ 603.374669] env[68569]: DEBUG nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.375450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caeb880-9c36-4eeb-a2e2-9128f4a69962 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.429528] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215a790-2335-999a-d7da-306e009b144e, 'name': SearchDatastore_Task, 'duration_secs': 0.013014} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.431945] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.432302] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 8eb18d79-e164-4e66-83b0-7b40d04c30a8/8eb18d79-e164-4e66-83b0-7b40d04c30a8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 603.432594] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3898bc3-ca94-465b-9965-a39ea19cc122 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.442965] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 603.442965] env[68569]: value = "task-3166459" [ 603.442965] env[68569]: _type = "Task" [ 603.442965] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.452760] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.477972] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 603.528217] env[68569]: INFO nova.compute.manager [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Took 25.92 seconds to build instance. [ 603.665866] env[68569]: DEBUG oslo_vmware.api [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166458, 'name': PowerOnVM_Task, 'duration_secs': 0.688184} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.666383] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.666692] env[68569]: INFO nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Took 11.17 seconds to spawn the instance on the hypervisor. [ 603.667406] env[68569]: DEBUG nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.668660] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d112b13-3b39-4240-a0c3-e481a571c683 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.687356] env[68569]: DEBUG nova.network.neutron [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Updated VIF entry in instance network info cache for port 0145d676-3585-4d48-ac88-bca4be5f193a. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 603.688085] env[68569]: DEBUG nova.network.neutron [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Updating instance_info_cache with network_info: [{"id": "0145d676-3585-4d48-ac88-bca4be5f193a", "address": "fa:16:3e:a4:e8:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0145d676-35", "ovs_interfaceid": "0145d676-3585-4d48-ac88-bca4be5f193a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.702383] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.740139] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 2c47ef2c-873a-4cb3-9a36-aa2155911b6e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 603.740521] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.740521] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 3ee3365b-0799-414b-b2a1-1d219bd9db96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742302] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance d6c45731-d76a-46cf-9b7d-be035a200948 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742302] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7693ef68-d7e5-4899-9615-9f2a1dd0bce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742302] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742302] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742543] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7129a57f-e639-49ae-96a9-3c1d966034a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742543] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 8eb18d79-e164-4e66-83b0-7b40d04c30a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742543] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 98d5c760-6da3-49e3-af47-20a8054971f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742543] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 1c8dfb47-df19-4101-8d4e-30889d71d7da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.742734] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fb97d2dd-d42a-42e8-9a36-5c913a58b891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 603.790564] env[68569]: DEBUG nova.network.neutron [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.892109] env[68569]: INFO nova.compute.manager [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Took 24.88 seconds to build instance. [ 603.954955] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166459, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.989240] env[68569]: DEBUG nova.network.neutron [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Successfully created port: 7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.002513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.032026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d7b51a7-bac9-4768-a51c-f7955f7465e7 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.435s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.032026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.128s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.037722] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab20735e-9796-4d21-8f4e-ee489526dec7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.109442] env[68569]: DEBUG nova.network.neutron [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Updating instance_info_cache with network_info: [{"id": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "address": "fa:16:3e:04:a0:b4", "network": {"id": 
"c61128ba-2dd2-405e-89e7-2b5ff8a8022c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-285039859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc479c8ce67f4cbebb03cc3df42c900d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbad77068-31", "ovs_interfaceid": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.194730] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ca002fd-4c23-4fa1-99b3-94eb2f765160 req-e2b5a3ab-928e-4462-82bd-91202296cd73 service nova] Releasing lock "refresh_cache-8eb18d79-e164-4e66-83b0-7b40d04c30a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.196687] env[68569]: INFO nova.compute.manager [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Took 25.92 seconds to build instance. [ 604.251997] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 26625edb-06ca-48cc-aaf1-3c55a6ea942b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 604.396322] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4df9ebe-11f4-429a-bed3-e7dc913cc021 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.403s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.455666] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.935221} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.456686] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 8eb18d79-e164-4e66-83b0-7b40d04c30a8/8eb18d79-e164-4e66-83b0-7b40d04c30a8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 604.457277] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 604.457277] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a5db009-b8d3-424b-89db-5ea4b839b8b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.464854] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 604.464854] env[68569]: value = "task-3166460" [ 604.464854] env[68569]: _type = "Task" [ 604.464854] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.476479] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166460, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.533583] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.555027] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.520s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.611963] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Releasing lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.612647] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Instance network_info: |[{"id": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "address": "fa:16:3e:04:a0:b4", "network": {"id": "c61128ba-2dd2-405e-89e7-2b5ff8a8022c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-285039859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc479c8ce67f4cbebb03cc3df42c900d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbad77068-31", "ovs_interfaceid": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 604.613279] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:a0:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bad77068-318c-4c21-8d0f-74d1c5d8da7b', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.628406] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Creating folder: Project (bc479c8ce67f4cbebb03cc3df42c900d). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.628406] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63ea39d8-6b69-4680-b343-ad1932ae643e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.639878] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Created folder: Project (bc479c8ce67f4cbebb03cc3df42c900d) in parent group-v633430. [ 604.639878] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Creating folder: Instances. Parent ref: group-v633459. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.639878] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe4be6b4-9689-4a31-964e-01e8bffe6cb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.651871] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Created folder: Instances in parent group-v633459. [ 604.651871] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.651871] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.652384] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c428bee-dfa7-4244-86eb-995ba97c0ab7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.677437] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.677437] env[68569]: value = "task-3166463" [ 604.677437] env[68569]: _type = "Task" [ 604.677437] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.686212] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166463, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.703821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-13b25da3-381f-4ee0-9d83-bc5193571971 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.433s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.721678] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.750717] env[68569]: DEBUG nova.network.neutron [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Successfully updated port: 4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 604.756462] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance e77cc179-1f3d-4095-a491-48df7f79bdb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 604.760092] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.760530] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.760753] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
604.760970] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.761185] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.762176] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.762176] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.762176] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.762176] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.762373] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.762945] env[68569]: DEBUG nova.virt.hardware [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.763551] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb0ffd8-e41d-4e02-9e2c-59ac5ab3413e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.775493] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec2d49a-39a9-431f-b0e2-b32727b15357 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.899690] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 
39a84212-2e52-4dba-b00c-5689564deaf4] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.976440] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082229} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.976685] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 604.977895] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e15ed58-6dfb-4b72-bd49-97e26da3caac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.004514] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 8eb18d79-e164-4e66-83b0-7b40d04c30a8/8eb18d79-e164-4e66-83b0-7b40d04c30a8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 605.006611] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7354fa1d-0293-440e-93e7-fd10dfd9ae11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.026846] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 605.026846] env[68569]: value = "task-3166464" [ 605.026846] env[68569]: _type = "Task" [ 605.026846] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.036805] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166464, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.060045] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.194032] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166463, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.253509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "refresh_cache-1c8dfb47-df19-4101-8d4e-30889d71d7da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.253806] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquired lock "refresh_cache-1c8dfb47-df19-4101-8d4e-30889d71d7da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.253880] env[68569]: DEBUG nova.network.neutron [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.268312] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 605.431923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.537651] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.692941] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166463, 'name': CreateVM_Task, 'duration_secs': 0.593358} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.693223] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 605.694159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.694371] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.694728] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 605.695033] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-067ecedb-d898-47aa-86f6-d222b1711ed5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.700878] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 605.700878] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5294fb94-a5de-bcda-bf20-08cc6794e5e3" [ 605.700878] env[68569]: _type = "Task" [ 605.700878] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.711343] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5294fb94-a5de-bcda-bf20-08cc6794e5e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.723614] env[68569]: DEBUG nova.network.neutron [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Successfully updated port: 7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.771400] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ad207187-634f-4e7f-9809-eb3f742ddeec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 605.824832] env[68569]: DEBUG nova.network.neutron [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.043343] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166464, 'name': ReconfigVM_Task, 'duration_secs': 0.669015} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.043665] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 8eb18d79-e164-4e66-83b0-7b40d04c30a8/8eb18d79-e164-4e66-83b0-7b40d04c30a8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 606.045364] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bdfb505-5bfd-4735-9ed9-a79e7bb27e2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.053332] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 606.053332] env[68569]: value = "task-3166465" [ 606.053332] env[68569]: _type = "Task" [ 606.053332] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.065975] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166465, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.173027] env[68569]: DEBUG nova.network.neutron [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Updating instance_info_cache with network_info: [{"id": "4a7f66da-42d4-4f36-9888-5bb341b4925c", "address": "fa:16:3e:20:f0:e9", "network": {"id": "008b2481-d4f3-4644-8a4d-6449f7a1f125", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-18224456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95ddfacc73184dce804752ef6c9fa79f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7f66da-42", "ovs_interfaceid": "4a7f66da-42d4-4f36-9888-5bb341b4925c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.213350] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5294fb94-a5de-bcda-bf20-08cc6794e5e3, 'name': SearchDatastore_Task, 'duration_secs': 0.029755} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.214231] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.214231] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.214318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.214464] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.214685] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 606.215304] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f009cf62-94f3-440d-b7c3-3356e1990e93 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.227375] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.227375] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.227375] env[68569]: DEBUG nova.network.neutron [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
606.234020] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 606.234020] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 606.234020] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfce1057-c9d1-42f9-b739-08b57887d8a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.239769] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 606.239769] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52449dac-1485-71e9-6f46-92c75e270044" [ 606.239769] env[68569]: _type = "Task" [ 606.239769] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.256302] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52449dac-1485-71e9-6f46-92c75e270044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.280833] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 39a84212-2e52-4dba-b00c-5689564deaf4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 606.281063] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 606.282115] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 606.565593] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166465, 'name': Rename_Task, 'duration_secs': 0.181109} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.566011] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 606.566641] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1372f464-4bf2-4177-be15-fbdca837779e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.573681] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 606.573681] env[68569]: value = "task-3166466" [ 606.573681] env[68569]: _type = "Task" [ 606.573681] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.586791] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166466, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.605994] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b81042-1cf7-487c-9f85-65ab0bda34de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.613600] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c74d0d-f01d-4414-8e32-2000823a7d26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.643617] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Acquiring lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.643957] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.644224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Acquiring lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.644463] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.644671] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.647033] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80176ad3-e95e-4325-bb68-507010e6bae5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.649939] env[68569]: INFO nova.compute.manager [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Terminating instance [ 606.656432] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcacb43-a582-47d0-89ef-05bb396704dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.669884] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.679580] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Releasing lock "refresh_cache-1c8dfb47-df19-4101-8d4e-30889d71d7da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.679927] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Instance network_info: |[{"id": "4a7f66da-42d4-4f36-9888-5bb341b4925c", "address": "fa:16:3e:20:f0:e9", "network": {"id": "008b2481-d4f3-4644-8a4d-6449f7a1f125", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-18224456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95ddfacc73184dce804752ef6c9fa79f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap4a7f66da-42", "ovs_interfaceid": "4a7f66da-42d4-4f36-9888-5bb341b4925c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 606.680571] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:f0:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd733acc2-07d0-479e-918c-ec8a21925389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a7f66da-42d4-4f36-9888-5bb341b4925c', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.687965] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Creating folder: Project (95ddfacc73184dce804752ef6c9fa79f). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.688507] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1cb03f6-afed-4535-aaa0-67a7f1cc71c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.700873] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Created folder: Project (95ddfacc73184dce804752ef6c9fa79f) in parent group-v633430. [ 606.701092] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Creating folder: Instances. Parent ref: group-v633462. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.701560] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-151dabf3-f663-439e-96e7-d6624592270e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.711648] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Created folder: Instances in parent group-v633462. [ 606.711915] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 606.712147] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.712337] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b39e873-9519-4a3b-be44-e20509776934 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.731883] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.731883] env[68569]: value = "task-3166469" [ 606.731883] env[68569]: _type = "Task" [ 606.731883] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.743303] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166469, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.752382] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52449dac-1485-71e9-6f46-92c75e270044, 'name': SearchDatastore_Task, 'duration_secs': 0.018902} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.752490] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e658e5-eef6-40fc-91c1-92cba8959515 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.758116] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 606.758116] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520e3244-2c3a-aa2b-3234-72bb18cf9909" [ 606.758116] env[68569]: _type = "Task" [ 606.758116] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.766080] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520e3244-2c3a-aa2b-3234-72bb18cf9909, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.792943] env[68569]: DEBUG nova.network.neutron [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.074628] env[68569]: DEBUG nova.network.neutron [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updating instance_info_cache with network_info: [{"id": "7d78a65a-5ed7-419c-b054-a78d424ca795", "address": "fa:16:3e:f7:35:b8", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d78a65a-5e", "ovs_interfaceid": "7d78a65a-5ed7-419c-b054-a78d424ca795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.086680] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166466, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.158579] env[68569]: DEBUG nova.compute.manager [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 607.158579] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.158579] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f83d07c-86a7-461a-b20a-45636fdcefb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.172032] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 607.175747] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.179936] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b41e7acc-6d10-4813-ad4d-7eb7109f2c63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.188011] env[68569]: DEBUG oslo_vmware.api [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Waiting for the task: (returnval){ [ 607.188011] env[68569]: value = "task-3166470" [ 607.188011] env[68569]: _type = "Task" [ 607.188011] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.196485] env[68569]: DEBUG oslo_vmware.api [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Task: {'id': task-3166470, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.245271] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166469, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.269284] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520e3244-2c3a-aa2b-3234-72bb18cf9909, 'name': SearchDatastore_Task, 'duration_secs': 0.020061} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.269595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.270089] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 98d5c760-6da3-49e3-af47-20a8054971f3/98d5c760-6da3-49e3-af47-20a8054971f3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 607.270462] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a98b9334-ed68-4b09-ba09-9fa20d970f06 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.278205] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 607.278205] env[68569]: value = "task-3166471" [ 607.278205] env[68569]: _type = "Task" [ 607.278205] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.287949] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166471, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.578369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Releasing lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.578721] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Instance network_info: |[{"id": "7d78a65a-5ed7-419c-b054-a78d424ca795", "address": "fa:16:3e:f7:35:b8", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d78a65a-5e", "ovs_interfaceid": "7d78a65a-5ed7-419c-b054-a78d424ca795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 607.582972] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:35:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62f28d75-4e6a-4ae5-b8b3-d0652ea26d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d78a65a-5ed7-419c-b054-a78d424ca795', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.592414] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Creating folder: Project (3d628075adbb43b8a572072277c25741). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.592414] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-038d9068-f525-4ebe-b134-2c343708cf4d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.600903] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166466, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.641103] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Created folder: Project (3d628075adbb43b8a572072277c25741) in parent group-v633430. [ 607.641668] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Creating folder: Instances. Parent ref: group-v633465. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.642195] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-333e6b0f-921f-4a92-b693-e8bbea2984c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.655040] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Created folder: Instances in parent group-v633465. [ 607.655040] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 607.655040] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.655040] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca8d74b1-ac89-459e-bbea-bf94ee88649c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.681081] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.681081] env[68569]: value = "task-3166474" [ 607.681081] env[68569]: _type = "Task" [ 607.681081] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.684413] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 607.685712] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.685712] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.503s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.685712] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.688218] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.327s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.691710] env[68569]: INFO nova.compute.claims [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.693664] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.694290] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Getting list of instances from cluster (obj){ [ 607.694290] env[68569]: value = "domain-c8" [ 607.694290] env[68569]: _type = "ClusterComputeResource" [ 607.694290] env[68569]: } {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 607.696600] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbbfef9-88fe-4b2e-b697-1ea870c4566e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.711868] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166474, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.716071] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Received event network-vif-plugged-bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 607.716505] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Acquiring lock "98d5c760-6da3-49e3-af47-20a8054971f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.716895] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Lock "98d5c760-6da3-49e3-af47-20a8054971f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.718018] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Lock "98d5c760-6da3-49e3-af47-20a8054971f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.718018] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] No waiting events found dispatching network-vif-plugged-bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 607.718018] env[68569]: WARNING nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Received unexpected event network-vif-plugged-bad77068-318c-4c21-8d0f-74d1c5d8da7b for instance with vm_state building and task_state spawning. [ 607.718018] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Received event network-changed-bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 607.718292] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Refreshing instance network info cache due to event network-changed-bad77068-318c-4c21-8d0f-74d1c5d8da7b. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 607.718772] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Acquiring lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.719034] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Acquired lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.719359] env[68569]: DEBUG nova.network.neutron [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Refreshing network info cache for port bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.733751] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Got total of 10 instances {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 607.741585] env[68569]: INFO nova.scheduler.client.report [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Deleted allocations for instance 2c47ef2c-873a-4cb3-9a36-aa2155911b6e [ 607.746221] env[68569]: DEBUG oslo_vmware.api [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Task: {'id': task-3166470, 'name': PowerOffVM_Task, 'duration_secs': 0.226692} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.750643] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 607.750643] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 607.752155] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19e2c66c-2e48-4dc2-baa4-c6d54d2257cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.763378] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166469, 'name': CreateVM_Task, 'duration_secs': 0.904044} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.763378] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 607.763547] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.763701] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.764075] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 607.764625] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79510652-5fe6-4009-9597-e786e9110f40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.771151] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 607.771151] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f15252-34e0-4b6d-8979-1ef965c097d7" [ 607.771151] env[68569]: _type = "Task" [ 607.771151] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.791543] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166471, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.858848] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.859253] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.859313] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Deleting the datastore file [datastore1] 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.861473] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7723210-cd80-477f-a84c-fc4c99cb4445 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.867284] env[68569]: DEBUG oslo_vmware.api [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Waiting for the task: (returnval){ [ 607.867284] env[68569]: value = "task-3166476" [ 607.867284] env[68569]: _type = "Task" [ 607.867284] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.876871] env[68569]: DEBUG oslo_vmware.api [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Task: {'id': task-3166476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.097536] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166466, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.191441] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166474, 'name': CreateVM_Task, 'duration_secs': 0.477374} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.191441] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.192212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.258298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fc587240-99e5-4394-a5c4-4619f2d9fca8 tempest-ServersAaction247Test-713867926 tempest-ServersAaction247Test-713867926-project-member] Lock "2c47ef2c-873a-4cb3-9a36-aa2155911b6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.520s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.301127] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f15252-34e0-4b6d-8979-1ef965c097d7, 'name': SearchDatastore_Task, 'duration_secs': 0.064302} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.305542] env[68569]: DEBUG nova.network.neutron [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Updated VIF entry in instance network info cache for port bad77068-318c-4c21-8d0f-74d1c5d8da7b. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 608.306063] env[68569]: DEBUG nova.network.neutron [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Updating instance_info_cache with network_info: [{"id": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "address": "fa:16:3e:04:a0:b4", "network": {"id": "c61128ba-2dd2-405e-89e7-2b5ff8a8022c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-285039859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc479c8ce67f4cbebb03cc3df42c900d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbad77068-31", "ovs_interfaceid": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.308233] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.308233] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.308233] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.308233] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.308422] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.308749] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166471, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577116} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.309413] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.309606] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 608.309857] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-086f9177-67e5-4951-8cec-2ff23fbbf396 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.312872] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 98d5c760-6da3-49e3-af47-20a8054971f3/98d5c760-6da3-49e3-af47-20a8054971f3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 608.312872] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 608.312872] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15c7b441-460f-48e4-a08c-b8495e73db09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.314166] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba1f3875-e25b-47dc-a9a5-9ac5f0be12cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.325947] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 608.325947] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522a5e8b-4089-9440-6340-057973ae6ea6" [ 608.325947] env[68569]: _type = "Task" [ 608.325947] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.331624] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 608.331624] env[68569]: value = "task-3166477" [ 608.331624] env[68569]: _type = "Task" [ 608.331624] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.336479] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.336847] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.344520] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6350e9c3-ecd5-4414-9f43-d3f3e5cf1df4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.353890] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522a5e8b-4089-9440-6340-057973ae6ea6, 'name': SearchDatastore_Task, 'duration_secs': 0.014219} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.358554] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.358937] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.359305] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.359619] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 608.359619] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526a540e-3ce2-d4ec-9d41-83eb6a3c56d1" [ 608.359619] env[68569]: _type = "Task" [ 608.359619] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.360128] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166477, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.371073] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526a540e-3ce2-d4ec-9d41-83eb6a3c56d1, 'name': SearchDatastore_Task, 'duration_secs': 0.014316} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.375030] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a8d5b6a-2ece-46e3-9d76-1cc572f88c64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.383362] env[68569]: DEBUG oslo_vmware.api [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Task: {'id': task-3166476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222645} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.385471] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.385471] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.385661] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.385867] env[68569]: INFO nova.compute.manager [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Took 1.23 seconds to destroy the instance on the hypervisor. [ 608.386185] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.386474] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 608.386474] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a66e87-6c6f-4e7f-8a20-be7a5f8f8b00" [ 608.386474] env[68569]: _type = "Task" [ 608.386474] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.386788] env[68569]: DEBUG nova.compute.manager [-] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 608.388199] env[68569]: DEBUG nova.network.neutron [-] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.408498] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a66e87-6c6f-4e7f-8a20-be7a5f8f8b00, 'name': SearchDatastore_Task, 'duration_secs': 0.013155} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.408838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.409116] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 1c8dfb47-df19-4101-8d4e-30889d71d7da/1c8dfb47-df19-4101-8d4e-30889d71d7da.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 608.409429] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.409622] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.409843] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7e6cc5b-21f8-4e2f-86b7-b9ca7f47f060 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.412832] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21e9a3a8-79fe-4f03-9c7e-5e066581b98c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.419088] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 608.419088] env[68569]: value = "task-3166478" [ 608.419088] env[68569]: _type = "Task" [ 608.419088] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.425263] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.425483] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.426903] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b56c7b84-e702-4758-8c8e-c549ad49cd0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.434074] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166478, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.437559] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 608.437559] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5206786e-f345-1b68-dea2-04ea08001945" [ 608.437559] env[68569]: _type = "Task" [ 608.437559] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.445687] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5206786e-f345-1b68-dea2-04ea08001945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.593632] env[68569]: DEBUG oslo_vmware.api [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166466, 'name': PowerOnVM_Task, 'duration_secs': 1.548424} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.595381] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 608.595637] env[68569]: INFO nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Took 11.33 seconds to spawn the instance on the hypervisor. 
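The spawn of instance 8eb18d79-e164-4e66-83b0-7b40d04c30a8 above ends with the usual oslo.vmware polling pattern: the driver logs "Waiting for the task ... to complete", _poll_task reports "progress is N%", and the task eventually shows "completed successfully". A minimal sketch of how that API is typically driven, assuming a reachable vCenter; the hostname, credentials and moref value below are placeholders, not values from this log:

    from oslo_vmware import api, vim_util

    # Placeholder connection details; in Nova these come from the [vmware]
    # section of nova.conf.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical moref value; the driver resolves real ones through the
    # PropertyCollector (the RetrievePropertiesEx invocations in the log).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call (cf. "Invoking
    # VirtualMachine.PowerOnVM_Task" above); wait_for_task() then polls the
    # task, producing the "progress is N%" DEBUG records, and returns the
    # task info on success or raises on failure.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)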
[ 608.595905] env[68569]: DEBUG nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.597534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8b5747-4b18-4c2b-9538-58a87ec55e4d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.761713] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "c56e4282-b1ca-42f5-b346-692779475df0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.762216] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c56e4282-b1ca-42f5-b346-692779475df0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.813575] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Releasing lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.813575] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Received event network-vif-plugged-4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 608.813575] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Acquiring lock "1c8dfb47-df19-4101-8d4e-30889d71d7da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.813575] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.813575] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.813759] env[68569]: DEBUG nova.compute.manager 
[req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] No waiting events found dispatching network-vif-plugged-4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 608.813759] env[68569]: WARNING nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Received unexpected event network-vif-plugged-4a7f66da-42d4-4f36-9888-5bb341b4925c for instance with vm_state building and task_state spawning. [ 608.813759] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Received event network-changed-4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 608.813759] env[68569]: DEBUG nova.compute.manager [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Refreshing instance network info cache due to event network-changed-4a7f66da-42d4-4f36-9888-5bb341b4925c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 608.813759] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Acquiring lock "refresh_cache-1c8dfb47-df19-4101-8d4e-30889d71d7da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.813921] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Acquired lock "refresh_cache-1c8dfb47-df19-4101-8d4e-30889d71d7da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.813921] env[68569]: DEBUG nova.network.neutron [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Refreshing network info cache for port 4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.851192] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105694} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.851451] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 608.853921] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34b7c75-4465-405e-a211-c63ff7cdca1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.878118] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 98d5c760-6da3-49e3-af47-20a8054971f3/98d5c760-6da3-49e3-af47-20a8054971f3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 608.881067] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-683e40aa-90d5-4f19-8718-22a9d2ff38e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.902399] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 608.902399] env[68569]: value = "task-3166479" [ 608.902399] env[68569]: _type = "Task" [ 608.902399] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.912226] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166479, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.935581] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166478, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.947153] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5206786e-f345-1b68-dea2-04ea08001945, 'name': SearchDatastore_Task, 'duration_secs': 0.015545} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.948378] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db686099-e778-4c87-ac5f-cc3e5a957f0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.956400] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 608.956400] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5266415b-2b87-1743-03e7-869f3adfdf1e" [ 608.956400] env[68569]: _type = "Task" [ 608.956400] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.964418] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5266415b-2b87-1743-03e7-869f3adfdf1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.118734] env[68569]: INFO nova.compute.manager [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Took 28.96 seconds to build instance. [ 609.159882] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3261ed96-8ae2-4010-afa4-9bfa28db3731 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.170382] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bbd86b-22ec-47be-ab4d-40bd9dd926d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.216906] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ec0f45-a13d-45ec-8254-47b158516f39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.225479] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27c75bf-891d-44e8-abcd-36eaa251a9ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.240169] env[68569]: DEBUG nova.compute.provider_tree [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.266998] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 609.417539] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166479, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.432307] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166478, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.941742} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.432726] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 1c8dfb47-df19-4101-8d4e-30889d71d7da/1c8dfb47-df19-4101-8d4e-30889d71d7da.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 609.433170] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.433781] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a54a3d07-87b5-43e9-a965-95c1deb43806 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.443067] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 609.443067] env[68569]: value = "task-3166484" [ 609.443067] env[68569]: _type = "Task" [ 609.443067] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.452313] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166484, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.468399] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5266415b-2b87-1743-03e7-869f3adfdf1e, 'name': SearchDatastore_Task, 'duration_secs': 0.029563} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.469483] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.469483] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fb97d2dd-d42a-42e8-9a36-5c913a58b891/fb97d2dd-d42a-42e8-9a36-5c913a58b891.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.469948] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c31ef67-dc3b-4a8b-9c4d-a81d6dae7b4d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.486030] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 609.486030] env[68569]: value = "task-3166485" [ 609.486030] env[68569]: _type = "Task" [ 609.486030] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.508026] env[68569]: DEBUG nova.network.neutron [-] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.508026] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166485, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.592970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "40b768c1-9007-4f78-a90f-61b2ac64553f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.593392] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.622285] env[68569]: DEBUG oslo_concurrency.lockutils [None req-41945de7-9df7-4d88-8e85-6940a7b0d2aa tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.520s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.746725] env[68569]: DEBUG nova.scheduler.client.report [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 609.797767] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.918811] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166479, 'name': ReconfigVM_Task, 'duration_secs': 0.739989} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.919387] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 98d5c760-6da3-49e3-af47-20a8054971f3/98d5c760-6da3-49e3-af47-20a8054971f3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 609.920178] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ce8603c-a23b-44f1-acb4-bfa76ef6ec8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.928135] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 609.928135] env[68569]: value = "task-3166486" [ 609.928135] env[68569]: _type = "Task" [ 609.928135] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.937715] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166486, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.952452] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087634} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.952651] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.953458] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfd2ccf-90a4-4dc3-8955-c00bdbeb33fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.981441] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 1c8dfb47-df19-4101-8d4e-30889d71d7da/1c8dfb47-df19-4101-8d4e-30889d71d7da.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.982073] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c757f700-948f-42fb-952b-2c9d7de88c42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.000165] env[68569]: DEBUG nova.network.neutron [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Updated VIF entry in instance network info cache for port 4a7f66da-42d4-4f36-9888-5bb341b4925c. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.000165] env[68569]: DEBUG nova.network.neutron [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Updating instance_info_cache with network_info: [{"id": "4a7f66da-42d4-4f36-9888-5bb341b4925c", "address": "fa:16:3e:20:f0:e9", "network": {"id": "008b2481-d4f3-4644-8a4d-6449f7a1f125", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-18224456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95ddfacc73184dce804752ef6c9fa79f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7f66da-42", "ovs_interfaceid": "4a7f66da-42d4-4f36-9888-5bb341b4925c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.010849] env[68569]: DEBUG nova.compute.manager [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 610.015378] env[68569]: INFO nova.compute.manager [-] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Took 1.63 seconds to deallocate network for instance. [ 610.023497] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 610.023497] env[68569]: value = "task-3166487" [ 610.023497] env[68569]: _type = "Task" [ 610.023497] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.023738] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166485, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.037270] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166487, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.124892] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.257955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.258733] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.261020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.010s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.262488] env[68569]: INFO nova.compute.claims [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.441585] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166486, 'name': Rename_Task, 'duration_secs': 0.382495} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.441846] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 610.442200] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3330c842-9b13-4671-8155-783412e57105 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.451073] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 610.451073] env[68569]: value = "task-3166488" [ 610.451073] env[68569]: _type = "Task" [ 610.451073] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.460749] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.509467] env[68569]: DEBUG oslo_concurrency.lockutils [req-0e7d35ab-437b-43e3-b50e-19f55a90af6f req-e6439830-99c5-4812-b620-a01dc3ed5246 service nova] Releasing lock "refresh_cache-1c8dfb47-df19-4101-8d4e-30889d71d7da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.512722] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69582} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.513028] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fb97d2dd-d42a-42e8-9a36-5c913a58b891/fb97d2dd-d42a-42e8-9a36-5c913a58b891.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.513307] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.513584] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3531272-d9c4-44f3-b7ce-0f3c198a9c8a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.522675] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 610.522675] env[68569]: value = "task-3166489" [ 610.522675] env[68569]: _type = "Task" [ 610.522675] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.537094] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.543521] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166489, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.547870] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.548334] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.653413] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.767791] env[68569]: DEBUG nova.compute.utils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 610.775293] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Allocating IP information in the background. 
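"Start building networks asynchronously" / "Allocating IP information in the background" describes a deliberate overlap: the slow Neutron allocation is kicked off in the background while block device mappings are prepared locally, and the network result is only awaited when it is actually needed. The rough sketch below shows that shape with concurrent.futures; allocate_network and build_block_devices are hypothetical stand-ins, not Nova's methods.

    from concurrent.futures import ThreadPoolExecutor
    import time

    def allocate_network(instance_id):
        # Stand-in for the slow port/IP allocation call to Neutron.
        time.sleep(0.2)
        return {"instance": instance_id, "port": "port-1234", "ip": "192.0.2.10"}

    def build_block_devices(instance_id):
        # Stand-in for preparing block device mappings; runs while the
        # network allocation is still in flight.
        return ["/dev/sda"]

    def build_instance(instance_id):
        with ThreadPoolExecutor(max_workers=1) as pool:
            # Start IP allocation in the background ...
            net_future = pool.submit(allocate_network, instance_id)
            # ... keep doing local work in the meantime ...
            bdms = build_block_devices(instance_id)
            # ... and only block when the network info is actually required.
            network_info = net_future.result(timeout=30)
        return bdms, network_info

    print(build_instance("26625edb"))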
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 610.775293] env[68569]: DEBUG nova.network.neutron [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 610.837625] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "c9264123-ab19-40d5-959a-791b8966d2f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.837891] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c9264123-ab19-40d5-959a-791b8966d2f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.898604] env[68569]: DEBUG nova.policy [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '332746b4e6ee4faf82bc84c5ab48f7d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e7a377a9a354da38e1ed136c3d93d03', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 610.965753] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166488, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.982975] env[68569]: DEBUG nova.compute.manager [None req-fec8b3cd-4aba-408e-aa15-c83c8723205f tempest-ServerDiagnosticsTest-1715771348 tempest-ServerDiagnosticsTest-1715771348-project-admin] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 610.984386] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f646162-d0e4-46e9-b196-e2421a1bbcd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.994656] env[68569]: INFO nova.compute.manager [None req-fec8b3cd-4aba-408e-aa15-c83c8723205f tempest-ServerDiagnosticsTest-1715771348 tempest-ServerDiagnosticsTest-1715771348-project-admin] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Retrieving diagnostics [ 610.995556] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96033786-e8e0-4388-9582-0ea482f14417 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.043989] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071175} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.047281] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.047977] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166487, 'name': ReconfigVM_Task, 'duration_secs': 0.787322} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.048699] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8e7e7d-b107-4e4e-b03a-02ac1aaaf003 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.052165] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 1c8dfb47-df19-4101-8d4e-30889d71d7da/1c8dfb47-df19-4101-8d4e-30889d71d7da.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.052821] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d74c967b-6cdb-4a99-b787-4c9498c7e9b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.081758] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] fb97d2dd-d42a-42e8-9a36-5c913a58b891/fb97d2dd-d42a-42e8-9a36-5c913a58b891.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.084390] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9febcfd0-d4f2-46dd-871e-d9075bd71a50 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.101077] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 611.101077] env[68569]: value = "task-3166490" [ 611.101077] env[68569]: _type = "Task" [ 611.101077] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.106852] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 611.106852] env[68569]: value = "task-3166491" [ 611.106852] env[68569]: _type = "Task" [ 611.106852] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.120363] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166490, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.123280] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166491, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.275475] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.465065] env[68569]: DEBUG oslo_vmware.api [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166488, 'name': PowerOnVM_Task, 'duration_secs': 0.689529} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.465065] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 611.465065] env[68569]: INFO nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Took 11.76 seconds to spawn the instance on the hypervisor. [ 611.465065] env[68569]: DEBUG nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 611.470166] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d85cfc5-1140-4060-87d0-6610de38c720 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.615020] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166490, 'name': Rename_Task, 'duration_secs': 0.174489} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.615968] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.616031] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c4370ba-30f0-4585-ac67-79e73ab61540 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.624808] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166491, 'name': ReconfigVM_Task, 'duration_secs': 0.447257} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.625649] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Reconfigured VM instance instance-0000000c to attach disk [datastore2] fb97d2dd-d42a-42e8-9a36-5c913a58b891/fb97d2dd-d42a-42e8-9a36-5c913a58b891.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.626132] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7396dea-05ac-4a6d-9120-7d69789e937a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.631833] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 611.631833] env[68569]: value = "task-3166492" [ 611.631833] env[68569]: _type = "Task" [ 611.631833] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.635993] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 611.635993] env[68569]: value = "task-3166493" [ 611.635993] env[68569]: _type = "Task" [ 611.635993] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.643764] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.649650] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166493, 'name': Rename_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.681387] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a05f7cc-5a8d-46a6-81b1-230064708706 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.689151] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfda430-2433-44bd-bcc0-86f96cc24921 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.722810] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78bf3a8-5383-41cf-a128-7d2ef2402959 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.730583] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9c0f79-3e81-4192-8535-d70534b37834 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.748118] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 611.954226] env[68569]: DEBUG nova.compute.manager [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Received event network-vif-plugged-7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 611.955077] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.957358] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.957358] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.957358] env[68569]: DEBUG 
nova.compute.manager [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] No waiting events found dispatching network-vif-plugged-7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 611.957358] env[68569]: WARNING nova.compute.manager [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Received unexpected event network-vif-plugged-7d78a65a-5ed7-419c-b054-a78d424ca795 for instance with vm_state building and task_state spawning. [ 611.957358] env[68569]: DEBUG nova.compute.manager [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Received event network-changed-7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 611.959853] env[68569]: DEBUG nova.compute.manager [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Refreshing instance network info cache due to event network-changed-7d78a65a-5ed7-419c-b054-a78d424ca795. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 611.960199] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] Acquiring lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.960339] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] Acquired lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.960505] env[68569]: DEBUG nova.network.neutron [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Refreshing network info cache for port 7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.999054] env[68569]: INFO nova.compute.manager [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Took 30.56 seconds to build instance. [ 612.011107] env[68569]: DEBUG nova.network.neutron [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Successfully created port: cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.146974] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166492, 'name': PowerOnVM_Task} progress is 78%. 
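The "No waiting events found dispatching network-vif-plugged-…" / "Received unexpected event …" pair above reflects an event registry: code that expects an external notification registers a waiter keyed by (instance, event name); when the event arrives it is matched against the registry, and if nobody registered for it the event is only logged as unexpected. The small registry below is a generic threading-based sketch with hypothetical names, not the nova.compute.manager implementation.

    import threading

    class InstanceEvents:
        """Match externally delivered events against registered waiters."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_id, event_name) -> threading.Event

        def prepare_for_event(self, instance_id, event_name):
            """Register interest in an event and return something to wait on."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_id, event_name)] = waiter
            return waiter

        def pop_instance_event(self, instance_id, event_name):
            """Dispatch an incoming event; warn if nobody was waiting for it."""
            with self._lock:
                waiter = self._waiters.pop((instance_id, event_name), None)
            if waiter is None:
                print("WARNING: received unexpected event %s for instance %s"
                      % (event_name, instance_id))
                return False
            waiter.set()
            return True

    events = InstanceEvents()
    w = events.prepare_for_event("fb97d2dd", "network-vif-plugged")
    events.pop_instance_event("fb97d2dd", "network-vif-plugged")   # matched
    assert w.wait(timeout=1)
    events.pop_instance_event("fb97d2dd", "network-changed")       # unexpected -> warning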
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.151213] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166493, 'name': Rename_Task, 'duration_secs': 0.260571} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.151491] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.151740] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71b8eec8-3ff8-4738-8828-71fb788363dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.160168] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 612.160168] env[68569]: value = "task-3166495" [ 612.160168] env[68569]: _type = "Task" [ 612.160168] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.173629] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166495, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.282157] env[68569]: ERROR nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [req-ae00b5d0-61d4-476c-9b93-dc4d7e787b17] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ae00b5d0-61d4-476c-9b93-dc4d7e787b17"}]} [ 612.290260] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Start spawning the instance on the hypervisor. 
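The 409 "placement.concurrent_update" error above, followed by "Refreshing inventories for resource provider …", is optimistic concurrency control: every inventory write carries the resource provider generation it was computed against, and if another writer bumped the generation first, Placement rejects the write, so the client re-reads the provider and retries. The condensed loop below is a sketch only; the client object and its get_provider/put_inventory methods are assumed interfaces standing in for the REST calls, not the real placement client API.

    class Conflict(Exception):
        """Stand-in for an HTTP 409 with code placement.concurrent_update."""

    def update_inventory_with_retry(client, provider_uuid, build_inventory,
                                    max_attempts=4):
        """Retry an inventory PUT whenever the provider generation is stale.

        Assumed interfaces for this sketch:
          client.get_provider(uuid) -> {'generation': int, ...}
          client.put_inventory(uuid, generation, inventory)  # raises Conflict on 409
          build_inventory() -> dict like {'VCPU': {...}, 'MEMORY_MB': {...}, ...}
        """
        for attempt in range(1, max_attempts + 1):
            generation = client.get_provider(provider_uuid)['generation']
            try:
                client.put_inventory(provider_uuid, generation, build_inventory())
                return True
            except Conflict:
                # Someone else updated the provider first; refresh and retry.
                print("generation conflict on attempt %d, refreshing" % attempt)
        return False

    # Fake client that conflicts once, then accepts the refreshed generation.
    class FakeClient:
        def __init__(self):
            self.generation = 5
            self.first = True
        def get_provider(self, uuid):
            return {'generation': self.generation}
        def put_inventory(self, uuid, generation, inventory):
            if self.first:
                self.first = False
                self.generation += 1        # simulate a concurrent writer
                raise Conflict()
            assert generation == self.generation

    update_inventory_with_retry(FakeClient(), "a29ec0c8",
                                lambda: {'VCPU': {'total': 48}})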
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.311829] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 612.326093] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 612.326344] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.326496] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.326699] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.326856] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.326998] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 612.328231] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 612.328462] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 612.328682] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 612.331021] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 612.331021] env[68569]: DEBUG nova.virt.hardware [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 612.331021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7ab6b7-c5c2-49ba-834b-ebdb30352b2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.338512] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 612.338687] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 612.347451] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bdb0d6-1703-469b-89b2-2f74352305f6 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.370027] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 612.395730] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 612.500455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-71a8f5c1-866f-4fca-a422-d938c24fb8ad tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "98d5c760-6da3-49e3-af47-20a8054971f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.076s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.524498] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "d6c45731-d76a-46cf-9b7d-be035a200948" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.525255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "d6c45731-d76a-46cf-9b7d-be035a200948" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.525255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "d6c45731-d76a-46cf-9b7d-be035a200948-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.525255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "d6c45731-d76a-46cf-9b7d-be035a200948-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.525471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 
tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "d6c45731-d76a-46cf-9b7d-be035a200948-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.527318] env[68569]: INFO nova.compute.manager [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Terminating instance [ 612.646517] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166492, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.675493] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166495, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.769466] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713cfb3b-28c5-435b-810d-e9fecfab21e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.779919] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5525131a-ec84-4885-8411-ace569040e33 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.817296] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd8baf1-a7bd-4c7a-8d0d-69176667131d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.827681] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9240e6-1731-43db-98d9-ee8c0330a26f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.844289] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 613.007674] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Starting instance... 
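The nova.virt.hardware lines earlier in this trace ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate every (sockets, cores, threads) combination whose product equals the vCPU count while staying within the socket/core/thread limits. The enumeration below is a compact generic sketch of that idea, not the nova.virt.hardware code.

    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield every topology with sockets*cores*threads == vcpus within limits."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield Topology(sockets, cores, threads)

    print(list(possible_topologies(1)))   # one topology: 1 socket, 1 core, 1 thread
    print(list(possible_topologies(4, max_threads=2)))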
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 613.032552] env[68569]: DEBUG nova.compute.manager [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.032552] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.033476] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2400f379-81d8-4138-af79-7ee9c85e4074 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.048654] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.048977] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6905e14b-7f2c-4a6c-b1b7-f8a16a3ab4da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.062474] env[68569]: DEBUG oslo_vmware.api [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the task: (returnval){ [ 613.062474] env[68569]: value = "task-3166496" [ 613.062474] env[68569]: _type = "Task" [ 613.062474] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.079862] env[68569]: DEBUG oslo_vmware.api [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.129441] env[68569]: DEBUG nova.network.neutron [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updated VIF entry in instance network info cache for port 7d78a65a-5ed7-419c-b054-a78d424ca795. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 613.129820] env[68569]: DEBUG nova.network.neutron [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updating instance_info_cache with network_info: [{"id": "7d78a65a-5ed7-419c-b054-a78d424ca795", "address": "fa:16:3e:f7:35:b8", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d78a65a-5e", "ovs_interfaceid": "7d78a65a-5ed7-419c-b054-a78d424ca795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.145930] env[68569]: DEBUG oslo_vmware.api [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166492, 'name': PowerOnVM_Task, 'duration_secs': 1.057235} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.147123] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.147461] env[68569]: INFO nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Took 10.87 seconds to spawn the instance on the hypervisor. [ 613.147860] env[68569]: DEBUG nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.149166] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6da374-a5bc-4de4-b0ca-39136f7543cc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.173852] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166495, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.373357] env[68569]: ERROR nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [req-7faa32ec-fd20-40a4-8113-7a5db409adfd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7faa32ec-fd20-40a4-8113-7a5db409adfd"}]} [ 613.396744] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 613.421304] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 613.421656] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 613.438529] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 613.472329] env[68569]: DEBUG nova.scheduler.client.report [None 
req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 613.537097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.577184] env[68569]: DEBUG oslo_vmware.api [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166496, 'name': PowerOffVM_Task, 'duration_secs': 0.354206} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.577448] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 613.577602] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.577844] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c484ecb5-4a6b-4d4e-aa93-f63d6b404fec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.615758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.616469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.616469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.616469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.616622] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.626126] env[68569]: INFO nova.compute.manager [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Terminating instance [ 613.638201] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] Releasing lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.638201] env[68569]: DEBUG nova.compute.manager [req-13f24a40-cf8a-4021-a3a5-4d9641cc89c7 req-fca9083f-e92a-4712-a6b0-1d2df09fefba service nova] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Received event network-vif-deleted-ae0d8af2-424e-4a56-8661-6162ef6ba924 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 613.648781] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.649075] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.649251] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Deleting the datastore file [datastore1] d6c45731-d76a-46cf-9b7d-be035a200948 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.649516] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4219dc49-6dc5-45b8-aca4-ead28fcda080 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.660771] env[68569]: DEBUG oslo_vmware.api [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for the 
task: (returnval){ [ 613.660771] env[68569]: value = "task-3166498" [ 613.660771] env[68569]: _type = "Task" [ 613.660771] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.681874] env[68569]: DEBUG oslo_vmware.api [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.687769] env[68569]: INFO nova.compute.manager [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Took 23.91 seconds to build instance. [ 613.688837] env[68569]: DEBUG oslo_vmware.api [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166495, 'name': PowerOnVM_Task, 'duration_secs': 1.382412} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.689660] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.689848] env[68569]: INFO nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Took 8.97 seconds to spawn the instance on the hypervisor. 
[ 613.693457] env[68569]: DEBUG nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.694253] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48ff968-1712-438b-b89e-a63769f3e753 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.704159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.705617] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.864619] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755941e5-4807-41cb-a7a0-3adf885184dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.875161] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e24b2b-a002-4f3f-a4e1-3c0845a1366f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.911917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f477a5a-88ee-4cc6-babc-6710869a6a39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.921829] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3730501-8433-497a-a373-243dd8694608 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.940068] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 614.131256] env[68569]: DEBUG nova.compute.manager [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 
tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 614.131561] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.132625] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1def092c-621d-4345-bea3-3a6199bab971 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.151539] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 614.151539] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c7f9714-5a7d-4b95-9458-a0b6b8c5e22d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.160808] env[68569]: DEBUG oslo_vmware.api [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 614.160808] env[68569]: value = "task-3166499" [ 614.160808] env[68569]: _type = "Task" [ 614.160808] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.181952] env[68569]: DEBUG oslo_vmware.api [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166499, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.187109] env[68569]: DEBUG oslo_vmware.api [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Task: {'id': task-3166498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374956} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.187369] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.187575] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.187941] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.192030] env[68569]: INFO nova.compute.manager [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Took 1.16 seconds to destroy the instance on the hypervisor. [ 614.192030] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.192030] env[68569]: DEBUG nova.compute.manager [-] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.192030] env[68569]: DEBUG nova.network.neutron [-] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.192030] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4a2e753-4469-4d8d-b1da-5c08954e5f72 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.558s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.224300] env[68569]: INFO nova.compute.manager [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Took 21.48 seconds to build instance. 
[ 614.492312] env[68569]: DEBUG nova.scheduler.client.report [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 27 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 614.492714] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 27 to 28 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 614.492861] env[68569]: DEBUG nova.compute.provider_tree [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 614.672346] env[68569]: DEBUG oslo_vmware.api [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166499, 'name': PowerOffVM_Task, 'duration_secs': 0.263521} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.672707] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.672937] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.673358] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-901372ad-b931-412d-891a-55d739176747 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.696101] env[68569]: DEBUG nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 614.724653] env[68569]: DEBUG oslo_concurrency.lockutils [None req-508e3436-0ec8-4292-9d7b-f00abb6af01d tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.311s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.764217] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.764556] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.764732] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Deleting the datastore file [datastore2] 8eb18d79-e164-4e66-83b0-7b40d04c30a8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.765397] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0682bae7-34b1-4d1d-8b66-cc9ce67f4e2f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.774171] env[68569]: DEBUG oslo_vmware.api [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for the task: (returnval){ [ 614.774171] env[68569]: value = "task-3166502" 
[ 614.774171] env[68569]: _type = "Task" [ 614.774171] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.785904] env[68569]: DEBUG oslo_vmware.api [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166502, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.847631] env[68569]: DEBUG nova.network.neutron [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Successfully updated port: cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.998628] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.737s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.999210] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 615.002594] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.007027] env[68569]: INFO nova.compute.claims [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.132229] env[68569]: DEBUG nova.compute.manager [None req-645a7ea8-4039-478a-a292-bc31adfadfbe tempest-ServerDiagnosticsV248Test-225293956 tempest-ServerDiagnosticsV248Test-225293956-project-admin] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.133893] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c248c95c-b315-4dd5-938e-db8837e42d03 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.142801] env[68569]: INFO nova.compute.manager [None req-645a7ea8-4039-478a-a292-bc31adfadfbe tempest-ServerDiagnosticsV248Test-225293956 tempest-ServerDiagnosticsV248Test-225293956-project-admin] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Retrieving diagnostics [ 615.143636] env[68569]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4136c7-6d66-415b-b64d-c34b4e746fe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.227031] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.285772] env[68569]: DEBUG oslo_vmware.api [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Task: {'id': task-3166502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.438386} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.286150] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 615.286496] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 615.286496] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.286757] env[68569]: INFO nova.compute.manager [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 615.286960] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 615.287171] env[68569]: DEBUG nova.compute.manager [-] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 615.287292] env[68569]: DEBUG nova.network.neutron [-] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.352939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "refresh_cache-26625edb-06ca-48cc-aaf1-3c55a6ea942b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.352939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquired lock "refresh_cache-26625edb-06ca-48cc-aaf1-3c55a6ea942b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.353076] env[68569]: DEBUG nova.network.neutron [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.372430] env[68569]: DEBUG nova.network.neutron [-] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.514475] env[68569]: DEBUG nova.compute.utils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 615.517884] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 615.518072] env[68569]: DEBUG nova.network.neutron [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 615.637747] env[68569]: DEBUG nova.policy [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6198db2a02d4f2980f133f13bafd49e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1de4680b5844def8ff462ea7ef6a25c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 615.875914] env[68569]: INFO nova.compute.manager [-] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Took 1.69 seconds to deallocate network for instance. [ 615.924218] env[68569]: DEBUG nova.compute.manager [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Received event network-changed-05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 615.924218] env[68569]: DEBUG nova.compute.manager [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Refreshing instance network info cache due to event network-changed-05b63b18-ef8d-4346-992c-880e73eb22d9. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 615.926032] env[68569]: DEBUG oslo_concurrency.lockutils [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] Acquiring lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.927248] env[68569]: DEBUG oslo_concurrency.lockutils [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] Acquired lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.927718] env[68569]: DEBUG nova.network.neutron [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Refreshing network info cache for port 05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.941124] env[68569]: DEBUG nova.network.neutron [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.021019] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 616.312780] env[68569]: DEBUG nova.network.neutron [-] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.388712] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.402070] env[68569]: DEBUG nova.network.neutron [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Updating instance_info_cache with network_info: [{"id": "cacf394f-f702-4d94-8010-c6f2d2e14123", "address": "fa:16:3e:db:9c:48", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcacf394f-f7", "ovs_interfaceid": "cacf394f-f702-4d94-8010-c6f2d2e14123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.405916] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a56e323-088d-4b30-a8a3-9e93a4e7ad4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.418718] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f041d764-4a3a-4511-abf5-120c1caa501b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.458999] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46236a1e-b502-41a9-ae7e-a34c5e17d264 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.469913] env[68569]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61075f4-de4e-4a9b-a50e-a488761f153b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.485920] env[68569]: DEBUG nova.compute.provider_tree [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 616.563247] env[68569]: DEBUG nova.network.neutron [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Successfully created port: b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.814527] env[68569]: INFO nova.compute.manager [-] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Took 1.53 seconds to deallocate network for instance. [ 616.905817] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Releasing lock "refresh_cache-26625edb-06ca-48cc-aaf1-3c55a6ea942b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.906220] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Instance network_info: |[{"id": "cacf394f-f702-4d94-8010-c6f2d2e14123", "address": "fa:16:3e:db:9c:48", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcacf394f-f7", "ovs_interfaceid": "cacf394f-f702-4d94-8010-c6f2d2e14123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 616.910953] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a77e674-c55a-439d-adae-408cf31c59fb 
tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:9c:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cacf394f-f702-4d94-8010-c6f2d2e14123', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.917327] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Creating folder: Project (2e7a377a9a354da38e1ed136c3d93d03). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.917637] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e76cced-3228-4806-8d4e-4461a1e33377 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.932642] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Created folder: Project (2e7a377a9a354da38e1ed136c3d93d03) in parent group-v633430. [ 616.934023] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Creating folder: Instances. Parent ref: group-v633471. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.934023] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1fc53aa-e027-4910-8b67-8b04131b68ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.951438] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Created folder: Instances in parent group-v633471. [ 616.951438] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.954746] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 616.954746] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ecac9f3-396c-4c9a-af7a-4fe593a9942b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.977148] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.977148] env[68569]: value = "task-3166506" [ 616.977148] env[68569]: _type = "Task" [ 616.977148] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.987588] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166506, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.027025] env[68569]: DEBUG nova.scheduler.client.report [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 28 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 617.027025] env[68569]: DEBUG nova.compute.provider_tree [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 28 to 29 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 617.027025] env[68569]: DEBUG nova.compute.provider_tree [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.032290] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 617.036339] env[68569]: DEBUG nova.network.neutron [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updated VIF entry in instance network info cache for port 05b63b18-ef8d-4346-992c-880e73eb22d9. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 617.037478] env[68569]: DEBUG nova.network.neutron [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updating instance_info_cache with network_info: [{"id": "05b63b18-ef8d-4346-992c-880e73eb22d9", "address": "fa:16:3e:b6:d6:de", "network": {"id": "880e856f-aa10-4ece-8287-93035c04f561", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-333722358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "020e9d06d0d148889f55316c2eed3c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05b63b18-ef", "ovs_interfaceid": "05b63b18-ef8d-4346-992c-880e73eb22d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.068884] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:50:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1026894007',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-91713122',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 617.069660] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.069907] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 617.070215] env[68569]: DEBUG nova.virt.hardware [None 
req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.070412] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 617.070599] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 617.070772] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 617.070942] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 617.071210] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 617.071320] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 617.071535] env[68569]: DEBUG nova.virt.hardware [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 617.072759] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0143dbd2-e0c6-4639-b5c9-cc024e3ec5ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.086558] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4475d3-9bae-46a1-9888-0534843c947d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.324305] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 
tempest-ServerDiagnosticsTest-161012758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.490398] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166506, 'name': CreateVM_Task, 'duration_secs': 0.390449} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.490521] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.491376] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.491565] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.492072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.492861] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-730a919c-61a8-4935-8e61-2843189a7a2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.511681] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 617.511681] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525a9c3d-4b83-1c29-0ee1-4423494c5299" [ 617.511681] env[68569]: _type = "Task" [ 617.511681] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.526757] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525a9c3d-4b83-1c29-0ee1-4423494c5299, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.543884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.544426] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 617.550301] env[68569]: DEBUG oslo_concurrency.lockutils [req-c5dcb0d1-798d-4a5f-9401-9f9ef3aab029 req-adbffe33-01d9-4196-951a-cfce2e79e3fb service nova] Releasing lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.551255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.492s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.552804] env[68569]: INFO nova.compute.claims [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.004257] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.004257] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.004257] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.004257] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 
tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.004499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.005375] env[68569]: INFO nova.compute.manager [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Terminating instance [ 618.027570] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525a9c3d-4b83-1c29-0ee1-4423494c5299, 'name': SearchDatastore_Task, 'duration_secs': 0.037973} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.028119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.028458] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.028794] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.029051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.029410] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 
tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.029827] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-976759d4-5c6e-41a8-b77f-e32b5f3454fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.044198] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.044198] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 618.044198] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e0623a5-bbe3-4323-a91d-49529f5c83ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.052217] env[68569]: DEBUG nova.compute.utils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 618.054140] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 618.054140] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522b7582-2f15-9be2-07cf-773aff7b3f15" [ 618.054140] env[68569]: _type = "Task" [ 618.054140] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.054828] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 618.055105] env[68569]: DEBUG nova.network.neutron [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 618.071662] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522b7582-2f15-9be2-07cf-773aff7b3f15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.212373] env[68569]: DEBUG nova.policy [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a36b79ccf66748b8947de41b13658722', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b89a0650f57445abafb21ac1186b86c6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 618.510568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "refresh_cache-7693ef68-d7e5-4899-9615-9f2a1dd0bce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.510854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquired lock "refresh_cache-7693ef68-d7e5-4899-9615-9f2a1dd0bce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.510938] env[68569]: DEBUG nova.network.neutron [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.555800] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 618.575778] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522b7582-2f15-9be2-07cf-773aff7b3f15, 'name': SearchDatastore_Task, 'duration_secs': 0.036633} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.576986] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6448cefe-3999-42f6-b38a-48dba3519cba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.585749] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 618.585749] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed9ac9-1f99-0728-3995-abf1ad369a95" [ 618.585749] env[68569]: _type = "Task" [ 618.585749] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.597094] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed9ac9-1f99-0728-3995-abf1ad369a95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.919826] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdcb44d-cf75-46a5-a7bf-74be605459cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.928528] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95389ac-c84a-41ab-837e-46ef200dbba4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.960188] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b93f278-53bb-4cfe-acca-c4fbbd73deba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.969695] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e0f4ad-9b1f-454a-bf27-20b1c64f3c10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.987524] env[68569]: DEBUG nova.compute.provider_tree [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.007520] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Received event network-vif-plugged-cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 619.008167] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Acquiring lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.008276] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.009038] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.009038] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] No waiting events found dispatching network-vif-plugged-cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.009038] env[68569]: WARNING nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Received unexpected event network-vif-plugged-cacf394f-f702-4d94-8010-c6f2d2e14123 for instance with vm_state building and task_state spawning. [ 619.009229] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Received event network-changed-cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 619.009405] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Refreshing instance network info cache due to event network-changed-cacf394f-f702-4d94-8010-c6f2d2e14123. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 619.009647] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Acquiring lock "refresh_cache-26625edb-06ca-48cc-aaf1-3c55a6ea942b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.009890] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Acquired lock "refresh_cache-26625edb-06ca-48cc-aaf1-3c55a6ea942b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.010133] env[68569]: DEBUG nova.network.neutron [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Refreshing network info cache for port cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.066060] env[68569]: DEBUG nova.network.neutron [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.100191] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed9ac9-1f99-0728-3995-abf1ad369a95, 'name': SearchDatastore_Task, 'duration_secs': 0.020225} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.100588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.102301] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 26625edb-06ca-48cc-aaf1-3c55a6ea942b/26625edb-06ca-48cc-aaf1-3c55a6ea942b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.102301] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9706cbc-3492-4951-850e-3f2dd0141102 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.110210] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 619.110210] env[68569]: value = "task-3166508" [ 619.110210] env[68569]: _type = "Task" [ 619.110210] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.121734] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.275130] env[68569]: DEBUG nova.network.neutron [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.353163] env[68569]: DEBUG nova.compute.manager [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Received event network-changed-7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 619.353886] env[68569]: DEBUG nova.compute.manager [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Refreshing instance network info cache due to event network-changed-7d78a65a-5ed7-419c-b054-a78d424ca795. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 619.355191] env[68569]: DEBUG oslo_concurrency.lockutils [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] Acquiring lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.355421] env[68569]: DEBUG oslo_concurrency.lockutils [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] Acquired lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.355634] env[68569]: DEBUG nova.network.neutron [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Refreshing network info cache for port 7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.493563] env[68569]: DEBUG nova.scheduler.client.report [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.570614] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 619.611019] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.611019] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.611019] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.611337] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.611337] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.611537] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.612149] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.612257] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 619.612695] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.612837] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.613043] env[68569]: DEBUG nova.virt.hardware [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.615357] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553662f9-12e2-44d9-a766-9aa03b5bbb53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.636653] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f9635d-c716-4395-bfbb-8e6449bc5222 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.643042] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166508, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.666788] env[68569]: DEBUG nova.network.neutron [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Successfully created port: 39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.699970] env[68569]: DEBUG nova.network.neutron [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Successfully updated port: b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 619.780452] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Releasing lock "refresh_cache-7693ef68-d7e5-4899-9615-9f2a1dd0bce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.780785] env[68569]: DEBUG nova.compute.manager [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 619.781032] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.782490] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71eddda7-f92a-4d48-881c-ff7cf1f462ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.794863] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 619.795392] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d4d79b8-c519-40ee-8aa0-4af64a89d302 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.808284] env[68569]: DEBUG oslo_vmware.api [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 619.808284] env[68569]: value = "task-3166509" [ 619.808284] env[68569]: _type = "Task" [ 619.808284] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.820434] env[68569]: DEBUG oslo_vmware.api [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.998619] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.000012] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 620.004739] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.573s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.009501] env[68569]: INFO nova.compute.claims [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.130290] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.775867} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.130632] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 26625edb-06ca-48cc-aaf1-3c55a6ea942b/26625edb-06ca-48cc-aaf1-3c55a6ea942b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.130853] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.131246] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e45f31ae-7011-47db-b9f2-bddb56e2a50b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.140403] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 620.140403] env[68569]: value = "task-3166510" [ 620.140403] env[68569]: _type = "Task" [ 620.140403] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.152832] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166510, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.204687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.204687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.204687] env[68569]: DEBUG nova.network.neutron [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.324873] env[68569]: DEBUG oslo_vmware.api [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166509, 'name': PowerOffVM_Task, 'duration_secs': 0.194316} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.325174] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 620.325381] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 620.326420] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddcf4af4-5ce5-4985-a69d-9d6061184bd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.360518] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 620.360742] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 620.360924] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Deleting the datastore file [datastore1] 7693ef68-d7e5-4899-9615-9f2a1dd0bce8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 620.361211] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fad7798f-0b7e-4548-9c8a-dc9ccb6dad6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.369675] env[68569]: DEBUG oslo_vmware.api [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for the task: (returnval){ [ 620.369675] env[68569]: value = "task-3166512" [ 620.369675] env[68569]: _type = "Task" [ 620.369675] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.379587] env[68569]: DEBUG oslo_vmware.api [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.490207] env[68569]: DEBUG nova.network.neutron [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Updated VIF entry in instance network info cache for port cacf394f-f702-4d94-8010-c6f2d2e14123. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.490692] env[68569]: DEBUG nova.network.neutron [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Updating instance_info_cache with network_info: [{"id": "cacf394f-f702-4d94-8010-c6f2d2e14123", "address": "fa:16:3e:db:9c:48", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcacf394f-f7", "ovs_interfaceid": "cacf394f-f702-4d94-8010-c6f2d2e14123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.513030] env[68569]: DEBUG nova.compute.utils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 620.518033] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.518033] env[68569]: DEBUG nova.network.neutron [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.523640] env[68569]: DEBUG nova.network.neutron [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updated VIF entry in instance network info cache for port 7d78a65a-5ed7-419c-b054-a78d424ca795. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.527021] env[68569]: DEBUG nova.network.neutron [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updating instance_info_cache with network_info: [{"id": "7d78a65a-5ed7-419c-b054-a78d424ca795", "address": "fa:16:3e:f7:35:b8", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d78a65a-5e", "ovs_interfaceid": "7d78a65a-5ed7-419c-b054-a78d424ca795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.655706] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123584} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.656104] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.657347] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58866a2-0abe-4ead-bf0b-7b6f0f7ac39b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.685308] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 26625edb-06ca-48cc-aaf1-3c55a6ea942b/26625edb-06ca-48cc-aaf1-3c55a6ea942b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.685995] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3864e931-7698-437e-a755-53da5cf364b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.710755] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 620.710755] env[68569]: value = "task-3166513" [ 620.710755] env[68569]: _type = "Task" [ 620.710755] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.726559] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166513, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.880477] env[68569]: DEBUG oslo_vmware.api [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Task: {'id': task-3166512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308653} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.880743] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.880957] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.881160] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.881336] env[68569]: INFO nova.compute.manager [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Took 1.10 seconds to destroy the instance on the hypervisor. [ 620.881576] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.881759] env[68569]: DEBUG nova.compute.manager [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 620.881856] env[68569]: DEBUG nova.network.neutron [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.931263] env[68569]: DEBUG nova.policy [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e8d626b0af64c3b93283e16d2b64760', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de1ec9d4fc3e45afb009f75ac86d5f05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.994514] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Releasing lock "refresh_cache-26625edb-06ca-48cc-aaf1-3c55a6ea942b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.994908] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Received event network-vif-deleted-f82cdc68-b219-4ba1-8786-d0f6ca06da82 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 620.995136] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Received event network-changed-bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 620.995920] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Refreshing instance network info cache due to event network-changed-bad77068-318c-4c21-8d0f-74d1c5d8da7b. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 620.995920] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Acquiring lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.995920] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Acquired lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.995920] env[68569]: DEBUG nova.network.neutron [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Refreshing network info cache for port bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 621.000009] env[68569]: DEBUG nova.network.neutron [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.013212] env[68569]: DEBUG nova.network.neutron [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.022397] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 621.035218] env[68569]: DEBUG oslo_concurrency.lockutils [req-c5f37636-aad9-4f6f-b73a-80e660b68be2 req-e2f619e4-1b57-45d1-a1b7-a466b00d0f2e service nova] Releasing lock "refresh_cache-fb97d2dd-d42a-42e8-9a36-5c913a58b891" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.230355] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166513, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.346237] env[68569]: DEBUG nova.network.neutron [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Updating instance_info_cache with network_info: [{"id": "b7d8aded-6354-4a88-941e-005173bada5f", "address": "fa:16:3e:ef:31:49", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d8aded-63", "ovs_interfaceid": "b7d8aded-6354-4a88-941e-005173bada5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.462746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ec8c3a-49ef-4ccc-a990-75cc3324739f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.471224] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729e8c21-283e-47c1-891a-8c9114f81b7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.513831] env[68569]: DEBUG nova.network.neutron [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.515773] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a440bc3-feac-4045-a1a7-7f048fb1ec08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.526190] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5eed1a-24e4-4685-a7f0-34a34c76e63b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.543931] env[68569]: DEBUG nova.compute.provider_tree [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.725098] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb 
tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166513, 'name': ReconfigVM_Task, 'duration_secs': 0.706632} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.725098] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 26625edb-06ca-48cc-aaf1-3c55a6ea942b/26625edb-06ca-48cc-aaf1-3c55a6ea942b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.725715] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-967dfb9d-3f13-440d-9791-6f6fcb7dcbcd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.733549] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 621.733549] env[68569]: value = "task-3166514" [ 621.733549] env[68569]: _type = "Task" [ 621.733549] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.744946] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166514, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.851894] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Releasing lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.852040] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Instance network_info: |[{"id": "b7d8aded-6354-4a88-941e-005173bada5f", "address": "fa:16:3e:ef:31:49", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d8aded-63", "ovs_interfaceid": "b7d8aded-6354-4a88-941e-005173bada5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 621.853035] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:31:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7d8aded-6354-4a88-941e-005173bada5f', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.861520] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Creating folder: Project (f1de4680b5844def8ff462ea7ef6a25c). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.861520] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c87962d0-a099-4587-9757-b70949211452 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.877897] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Created folder: Project (f1de4680b5844def8ff462ea7ef6a25c) in parent group-v633430. [ 621.878901] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Creating folder: Instances. Parent ref: group-v633475. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.879225] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33eebc99-8a92-43bf-9a4e-9109ae83bd3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.893637] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Created folder: Instances in parent group-v633475. [ 621.893888] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 621.894112] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.894332] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1104f07a-6c5e-4b12-a7f7-bbe2243eb07f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.919670] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.919670] env[68569]: value = "task-3166517" [ 621.919670] env[68569]: _type = "Task" [ 621.919670] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.929024] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166517, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.019943] env[68569]: INFO nova.compute.manager [-] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Took 1.14 seconds to deallocate network for instance. [ 622.047333] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 622.052027] env[68569]: DEBUG nova.scheduler.client.report [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.083327] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 622.083600] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.083758] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 622.084022] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.084199] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 622.084396] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 622.084595] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 622.084804] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 622.084995] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 622.085178] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 622.085365] env[68569]: DEBUG nova.virt.hardware [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 622.086606] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f427598f-2915-425c-a7b6-4b3df66910e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.097332] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47414bf7-20ca-48d8-bd7f-158c61f20c0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.103047] env[68569]: DEBUG nova.network.neutron [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Updated VIF entry in instance network info cache for port bad77068-318c-4c21-8d0f-74d1c5d8da7b. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 622.103422] env[68569]: DEBUG nova.network.neutron [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Updating instance_info_cache with network_info: [{"id": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "address": "fa:16:3e:04:a0:b4", "network": {"id": "c61128ba-2dd2-405e-89e7-2b5ff8a8022c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-285039859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc479c8ce67f4cbebb03cc3df42c900d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbad77068-31", "ovs_interfaceid": "bad77068-318c-4c21-8d0f-74d1c5d8da7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.247728] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166514, 'name': Rename_Task, 'duration_secs': 0.163174} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.248187] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.248561] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63366393-b273-42c8-a871-1ddb3eccafe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.257378] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 622.257378] env[68569]: value = "task-3166518" [ 622.257378] env[68569]: _type = "Task" [ 622.257378] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.267116] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166518, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.328316] env[68569]: DEBUG nova.network.neutron [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Successfully updated port: 39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.435366] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166517, 'name': CreateVM_Task, 'duration_secs': 0.42708} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.435366] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.436607] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.436813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.437016] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 622.437298] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be606604-c82c-4991-ad03-d8131496bd3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.445644] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 622.445644] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523bb1ac-6215-7436-fe1d-401b79dcfa11" [ 622.445644] env[68569]: _type = "Task" [ 622.445644] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.457978] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523bb1ac-6215-7436-fe1d-401b79dcfa11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.491971] env[68569]: DEBUG nova.compute.manager [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Received event network-vif-plugged-b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 622.492349] env[68569]: DEBUG oslo_concurrency.lockutils [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] Acquiring lock "e77cc179-1f3d-4095-a491-48df7f79bdb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.492693] env[68569]: DEBUG oslo_concurrency.lockutils [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.493110] env[68569]: DEBUG oslo_concurrency.lockutils [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.494335] env[68569]: DEBUG nova.compute.manager [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] No waiting events found dispatching network-vif-plugged-b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 622.497492] env[68569]: WARNING nova.compute.manager [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Received unexpected event network-vif-plugged-b7d8aded-6354-4a88-941e-005173bada5f for instance with vm_state building and task_state spawning. [ 622.497492] env[68569]: DEBUG nova.compute.manager [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Received event network-changed-b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 622.497492] env[68569]: DEBUG nova.compute.manager [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Refreshing instance network info cache due to event network-changed-b7d8aded-6354-4a88-941e-005173bada5f. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 622.497492] env[68569]: DEBUG oslo_concurrency.lockutils [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] Acquiring lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.497492] env[68569]: DEBUG oslo_concurrency.lockutils [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] Acquired lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.497814] env[68569]: DEBUG nova.network.neutron [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Refreshing network info cache for port b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.528335] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.559053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.559053] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.561717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.764s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.565498] env[68569]: INFO nova.compute.claims [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.612266] env[68569]: DEBUG nova.network.neutron [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Successfully created port: 17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.615483] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] Releasing lock "refresh_cache-98d5c760-6da3-49e3-af47-20a8054971f3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.615757] env[68569]: DEBUG nova.compute.manager [req-2e7a3051-8d50-4f86-953d-0534663a4db1 req-e50e0eca-e9f5-460e-b069-672db5a65564 service nova] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Received event network-vif-deleted-0145d676-3585-4d48-ac88-bca4be5f193a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 622.774434] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166518, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.834352] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.834352] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquired lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.834352] env[68569]: DEBUG nova.network.neutron [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.944530] env[68569]: DEBUG nova.compute.manager [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Received event network-changed-05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 622.944875] env[68569]: DEBUG nova.compute.manager [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Refreshing instance network info cache due to event network-changed-05b63b18-ef8d-4346-992c-880e73eb22d9. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 622.945036] env[68569]: DEBUG oslo_concurrency.lockutils [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] Acquiring lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.945212] env[68569]: DEBUG oslo_concurrency.lockutils [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] Acquired lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.945692] env[68569]: DEBUG nova.network.neutron [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Refreshing network info cache for port 05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.966949] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523bb1ac-6215-7436-fe1d-401b79dcfa11, 'name': SearchDatastore_Task, 'duration_secs': 0.0196} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.967396] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.967537] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.967898] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.967898] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.968152] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.968468] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1ecd1bd-b39c-41a5-b5a3-8ce4fd89b46b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.979941] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.980171] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 622.980903] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bae68943-bd52-405d-b8de-5b46f432acbe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.987375] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 622.987375] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ece665-c3de-0045-1b80-a706e718a7bf" [ 622.987375] env[68569]: _type = "Task" [ 622.987375] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.995775] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ece665-c3de-0045-1b80-a706e718a7bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.076034] env[68569]: DEBUG nova.compute.utils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 623.076034] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 623.076034] env[68569]: DEBUG nova.network.neutron [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 623.232789] env[68569]: DEBUG nova.policy [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f23ccc9a0c054d6db6bd439e15b1e5ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d69fb55927344ad96aa0bf26f8f230e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 623.271629] env[68569]: DEBUG oslo_vmware.api [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166518, 'name': PowerOnVM_Task, 'duration_secs': 0.707957} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.272238] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 623.272676] env[68569]: INFO nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Took 10.98 seconds to spawn the instance on the hypervisor. [ 623.272974] env[68569]: DEBUG nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 623.273934] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea21b12-0f61-4d85-a756-29beec101441 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.471683] env[68569]: DEBUG nova.network.neutron [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.503952] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ece665-c3de-0045-1b80-a706e718a7bf, 'name': SearchDatastore_Task, 'duration_secs': 0.01112} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.506765] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a87b009d-35c6-4985-ae47-f82f7aef7cac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.514171] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 623.514171] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b2a7c-473b-75ed-89cb-a6cc64967590" [ 623.514171] env[68569]: _type = "Task" [ 623.514171] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.526940] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b2a7c-473b-75ed-89cb-a6cc64967590, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.581057] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.798849] env[68569]: INFO nova.compute.manager [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Took 22.45 seconds to build instance. [ 623.931144] env[68569]: DEBUG nova.network.neutron [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updating instance_info_cache with network_info: [{"id": "39d832b9-c266-46b1-9633-7204b1095ba5", "address": "fa:16:3e:43:8c:69", "network": {"id": "28a77754-516d-48cc-8a3e-a644b6fe8a04", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-714835425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89a0650f57445abafb21ac1186b86c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d832b9-c2", "ovs_interfaceid": "39d832b9-c266-46b1-9633-7204b1095ba5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.027113] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b2a7c-473b-75ed-89cb-a6cc64967590, 'name': SearchDatastore_Task, 'duration_secs': 0.028107} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.027603] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.027984] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] e77cc179-1f3d-4095-a491-48df7f79bdb9/e77cc179-1f3d-4095-a491-48df7f79bdb9.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 624.028280] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0d3c24e-15be-4c6e-b191-a6feed3f8245 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.032641] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ae8a97-a211-4f1e-b6f2-a604646876a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.040827] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa1f397-2684-41c6-8bb8-206ca8689677 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.045338] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 624.045338] env[68569]: value = "task-3166520" [ 624.045338] env[68569]: _type = "Task" [ 624.045338] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.079016] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75704ac-d275-4c77-9b51-23eb4efb7818 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.087343] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.095223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09315dab-37c6-4ad2-bbdb-aec2c141caf1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.112515] env[68569]: DEBUG nova.compute.provider_tree [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.195124] env[68569]: DEBUG nova.network.neutron [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Updated VIF entry in instance network info cache for port b7d8aded-6354-4a88-941e-005173bada5f. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 624.196035] env[68569]: DEBUG nova.network.neutron [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Updating instance_info_cache with network_info: [{"id": "b7d8aded-6354-4a88-941e-005173bada5f", "address": "fa:16:3e:ef:31:49", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d8aded-63", "ovs_interfaceid": "b7d8aded-6354-4a88-941e-005173bada5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.300895] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a77e674-c55a-439d-adae-408cf31c59fb tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.011s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.437283] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Releasing lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.437643] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Instance network_info: |[{"id": "39d832b9-c266-46b1-9633-7204b1095ba5", "address": "fa:16:3e:43:8c:69", "network": {"id": "28a77754-516d-48cc-8a3e-a644b6fe8a04", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-714835425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89a0650f57445abafb21ac1186b86c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d832b9-c2", "ovs_interfaceid": "39d832b9-c266-46b1-9633-7204b1095ba5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 624.438103] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:8c:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39d832b9-c266-46b1-9633-7204b1095ba5', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.450126] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Creating folder: Project (b89a0650f57445abafb21ac1186b86c6). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.452663] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cc602bc-fed1-4c6e-af08-d51101fc9555 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.473114] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Created folder: Project (b89a0650f57445abafb21ac1186b86c6) in parent group-v633430. [ 624.473357] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Creating folder: Instances. Parent ref: group-v633478. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.473611] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8e08b23-e14c-4a52-9efe-768c2be90539 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.488822] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Created folder: Instances in parent group-v633478. [ 624.489209] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.489310] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.489522] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1e7f64a-80d9-4abd-bbc9-acbc9e09df1a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.514215] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.514215] env[68569]: value = "task-3166523" [ 624.514215] env[68569]: _type = "Task" [ 624.514215] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.526045] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166523, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.561156] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507407} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.562109] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] e77cc179-1f3d-4095-a491-48df7f79bdb9/e77cc179-1f3d-4095-a491-48df7f79bdb9.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.562109] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.562109] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a716d626-50c3-4ca1-81f8-30f3a60d4f68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.569671] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 624.569671] env[68569]: value = "task-3166525" [ 624.569671] env[68569]: _type = "Task" [ 624.569671] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.585022] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166525, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.594731] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.622092] env[68569]: DEBUG nova.scheduler.client.report [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.640669] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.640938] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.641114] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.642522] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.642522] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.642522] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.642522] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.642522] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.642749] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.642749] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.642749] env[68569]: DEBUG nova.virt.hardware [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.643835] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd5f9a5-8e0e-4056-b626-f3295a7aef42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.660630] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f744a6c-f33d-4ac5-8ba5-5fd1877f19c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.699182] env[68569]: DEBUG oslo_concurrency.lockutils [req-a2677aef-6f21-4770-aa15-d588696d19e1 req-7d6c73f2-b826-42f0-a6b7-1c1f5ef41331 service nova] Releasing lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.014638] env[68569]: DEBUG nova.network.neutron [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updated VIF entry in instance network info cache for port 05b63b18-ef8d-4346-992c-880e73eb22d9. 
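The nova.virt.hardware trace a few records above walks the CPU-topology selection for the m1.nano flavor: 1 vCPU with no flavor or image limits, so the only valid factorisation is sockets=1, cores=1, threads=1. Purely as a self-contained illustration of that enumeration (this is not nova's implementation; the helper name and defaults are assumptions), the search looks roughly like this:

```python
# Illustrative-only sketch of the topology enumeration traced above: for
# 1 vCPU with no flavor/image limits the only factorisation is 1:1:1.
# This is NOT nova.virt.hardware; the helper name is an assumption.
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Return (sockets, cores, threads) triples whose product equals vcpus."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # includes (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```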
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 625.015136] env[68569]: DEBUG nova.network.neutron [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updating instance_info_cache with network_info: [{"id": "05b63b18-ef8d-4346-992c-880e73eb22d9", "address": "fa:16:3e:b6:d6:de", "network": {"id": "880e856f-aa10-4ece-8287-93035c04f561", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-333722358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "020e9d06d0d148889f55316c2eed3c0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05b63b18-ef", "ovs_interfaceid": "05b63b18-ef8d-4346-992c-880e73eb22d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.031830] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166523, 'name': CreateVM_Task, 'duration_secs': 0.382215} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.032742] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.034155] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.034155] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.034155] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 625.034645] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a4897de-b3ed-40c3-989d-e7c68cb4d967 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.040911] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 625.040911] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5275b08c-8647-0f0c-3d44-dff8cb5be7eb" [ 625.040911] env[68569]: _type = "Task" [ 625.040911] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.055083] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5275b08c-8647-0f0c-3d44-dff8cb5be7eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010354} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.055440] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.055684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.055917] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.056077] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.056259] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.056564] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd0e7710-dfd5-483d-b804-eb17d601d44f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.075623] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 625.075888] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Folder [datastore1] devstack-image-cache_base created. 
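The Acquiring/Acquired/Releasing records around the devstack-image-cache_base paths above come from oslo.concurrency's lockutils, which serialises concurrent work on the shared image-cache path while the cached VMDK is checked and copied. A minimal sketch of that usage, assuming lockutils' public lock() context manager; the lock name mirrors the datastore-path style from the log and the body is a placeholder:

```python
# Minimal sketch of the lock pattern in the image-cache records above,
# assuming oslo.concurrency's public lockutils.lock() context manager.
# The lock name mirrors the datastore path style in the log; the body is a
# placeholder for the cache check / disk copy work.
from oslo_concurrency import lockutils

cache_vmdk = ('[datastore1] devstack-image-cache_base/'
              'cfcf6154-fe87-45d3-9aaf-2d3604c95629/'
              'cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk')

# lock() logs the "Acquiring lock ...", "Acquired lock ..." and
# "Releasing lock ..." DEBUG lines on entry and exit.
with lockutils.lock(cache_vmdk):
    pass  # e.g. verify the cached VMDK exists, otherwise fetch/copy it
```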
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 625.077235] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df7c2fec-fd8b-4071-8c06-f62a5a5fe840 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.086147] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093298} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.086791] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.087620] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dcb330-7c2a-4623-9e90-1fa1e19d8030 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.092400] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 625.092400] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523dbdcd-462a-f531-5bbd-2be4819546e0" [ 625.092400] env[68569]: _type = "Task" [ 625.092400] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.097909] env[68569]: DEBUG nova.network.neutron [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Successfully created port: 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 625.119751] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] e77cc179-1f3d-4095-a491-48df7f79bdb9/e77cc179-1f3d-4095-a491-48df7f79bdb9.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.120990] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81334ebb-398f-4a07-9123-e9bb65348cec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.141475] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.141994] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 625.144838] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523dbdcd-462a-f531-5bbd-2be4819546e0, 'name': SearchDatastore_Task, 'duration_secs': 0.010348} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.145472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.611s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.145544] env[68569]: DEBUG nova.objects.instance [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lazy-loading 'resources' on Instance uuid 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 625.148242] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3366ba0d-402e-4518-baaa-ff6140059d99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.153801] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 625.153801] env[68569]: value = "task-3166526" [ 625.153801] env[68569]: _type = "Task" [ 625.153801] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.154896] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 625.154896] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523df965-f2cb-904a-0b6c-80211e2dd9db" [ 625.154896] env[68569]: _type = "Task" [ 625.154896] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.169801] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523df965-f2cb-904a-0b6c-80211e2dd9db, 'name': SearchDatastore_Task, 'duration_secs': 0.011129} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.173275] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.173789] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28/50b9775c-ddbd-4e8f-a2b8-b08c3028fc28.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.173902] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.174922] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-041e7a9c-d0ab-4351-9350-75b7bfa1e040 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.184901] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 625.184901] env[68569]: value = "task-3166527" [ 625.184901] env[68569]: _type = "Task" [ 625.184901] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.196346] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166527, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.518897] env[68569]: DEBUG oslo_concurrency.lockutils [req-a646e88b-c7eb-46ec-a0d3-d99a6d79d132 req-ce0bd160-4099-4399-8add-73ae800e002c service nova] Releasing lock "refresh_cache-7129a57f-e639-49ae-96a9-3c1d966034a8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.647050] env[68569]: DEBUG nova.compute.utils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 625.650162] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 625.650365] env[68569]: DEBUG nova.network.neutron [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 625.673693] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.702032] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166527, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.933951] env[68569]: DEBUG nova.policy [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'facbf3a2c92646b0b6d4974c03f6a3d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff829a2a7962492b946a99d623c337ac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 626.070018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffd8935-7356-4f55-8d20-badecfaa3ea0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.078031] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1afc2b3-82b9-48c4-90b8-837a04eaeeae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.115404] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a4234d-c251-4107-975c-f2dd212f34ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.124674] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804b906b-9fff-47ae-9f78-2714008ca841 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.141548] env[68569]: DEBUG nova.compute.provider_tree [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.161645] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 
tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 626.183760] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166526, 'name': ReconfigVM_Task, 'duration_secs': 0.597372} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.183760] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Reconfigured VM instance instance-0000000e to attach disk [datastore1] e77cc179-1f3d-4095-a491-48df7f79bdb9/e77cc179-1f3d-4095-a491-48df7f79bdb9.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 626.184024] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eba759dd-1638-466e-a538-3cb83ec47694 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.193364] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 626.193364] env[68569]: value = "task-3166528" [ 626.193364] env[68569]: _type = "Task" [ 626.193364] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.197595] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166527, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514076} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.202599] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28/50b9775c-ddbd-4e8f-a2b8-b08c3028fc28.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.202599] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.203575] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dde068b1-af44-48e9-a467-9f258dbbbb10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.215370] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166528, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.219118] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 626.219118] env[68569]: value = "task-3166529" [ 626.219118] env[68569]: _type = "Task" [ 626.219118] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.233429] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.292444] env[68569]: DEBUG nova.network.neutron [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Successfully updated port: 17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.648369] env[68569]: DEBUG nova.scheduler.client.report [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.710116] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166528, 'name': Rename_Task, 'duration_secs': 0.163901} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.711211] env[68569]: DEBUG nova.network.neutron [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Successfully created port: eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.713595] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.714026] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6dc616f-226e-4d4f-9525-bdaf096713c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.724095] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 626.724095] env[68569]: value = "task-3166531" [ 626.724095] env[68569]: _type = "Task" [ 626.724095] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.731920] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120082} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.732785] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.733486] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a286712c-492a-4d75-a218-946dd7675cc2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.740362] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.763869] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28/50b9775c-ddbd-4e8f-a2b8-b08c3028fc28.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.764240] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa83078c-ddb9-4c9d-bf3c-84518d02e8fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.790523] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 626.790523] env[68569]: value = "task-3166532" [ 626.790523] env[68569]: _type = "Task" [ 626.790523] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.805093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.805093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.806107] env[68569]: DEBUG nova.network.neutron [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.807069] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166532, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.853856] env[68569]: DEBUG nova.compute.manager [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Received event network-vif-plugged-39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 626.854537] env[68569]: DEBUG oslo_concurrency.lockutils [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.854929] env[68569]: DEBUG oslo_concurrency.lockutils [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.854983] env[68569]: DEBUG oslo_concurrency.lockutils [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.855189] env[68569]: DEBUG nova.compute.manager [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] No waiting events found dispatching network-vif-plugged-39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) pop_instance_event 
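The network-vif-plugged records here show the Neutron-to-Nova external event flow: Neutron reports that a port was plugged or changed, and the compute manager either hands the event to a registered waiter or, as in the warning that follows, logs it as unexpected because nothing is waiting yet while the instance is still building. Purely as an illustration of that dispatch pattern (this is not nova.compute.manager; every name below is invented for the example):

```python
# Illustrative-only sketch of the "pop waiting event or warn on unexpected"
# dispatch seen in these records; NOT nova.compute.manager, and all names
# here are invented for the example.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEventRegistry:
    """Map (instance_uuid, event_name) -> threading.Event for waiters."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}

    def prepare(self, instance_uuid, event_name):
        """Called by the spawning thread before it plugs the VIF."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when an external event such as network-vif-plugged arrives."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)
        else:
            ev.set()
```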
/opt/stack/nova/nova/compute/manager.py:322}} [ 626.855376] env[68569]: WARNING nova.compute.manager [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Received unexpected event network-vif-plugged-39d832b9-c266-46b1-9633-7204b1095ba5 for instance with vm_state building and task_state spawning. [ 626.855533] env[68569]: DEBUG nova.compute.manager [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Received event network-changed-39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 626.855687] env[68569]: DEBUG nova.compute.manager [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Refreshing instance network info cache due to event network-changed-39d832b9-c266-46b1-9633-7204b1095ba5. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 626.856053] env[68569]: DEBUG oslo_concurrency.lockutils [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] Acquiring lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.856183] env[68569]: DEBUG oslo_concurrency.lockutils [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] Acquired lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.856340] env[68569]: DEBUG nova.network.neutron [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Refreshing network info cache for port 39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.000092] env[68569]: DEBUG nova.compute.manager [req-f9096a39-7624-42ae-9967-34f34c38acc4 req-4bcf2b3c-1c6b-4760-9c9e-19b805358053 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received event network-vif-plugged-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 627.000092] env[68569]: DEBUG oslo_concurrency.lockutils [req-f9096a39-7624-42ae-9967-34f34c38acc4 req-4bcf2b3c-1c6b-4760-9c9e-19b805358053 service nova] Acquiring lock "ad207187-634f-4e7f-9809-eb3f742ddeec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.000092] env[68569]: DEBUG oslo_concurrency.lockutils [req-f9096a39-7624-42ae-9967-34f34c38acc4 req-4bcf2b3c-1c6b-4760-9c9e-19b805358053 service nova] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.000092] env[68569]: DEBUG oslo_concurrency.lockutils [req-f9096a39-7624-42ae-9967-34f34c38acc4 req-4bcf2b3c-1c6b-4760-9c9e-19b805358053 service nova] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.000092] env[68569]: DEBUG nova.compute.manager [req-f9096a39-7624-42ae-9967-34f34c38acc4 req-4bcf2b3c-1c6b-4760-9c9e-19b805358053 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] No waiting events found dispatching network-vif-plugged-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 627.000963] env[68569]: WARNING nova.compute.manager [req-f9096a39-7624-42ae-9967-34f34c38acc4 req-4bcf2b3c-1c6b-4760-9c9e-19b805358053 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received unexpected event network-vif-plugged-17651803-ec04-4913-b621-e685c574de0c for instance with vm_state building and task_state spawning. [ 627.156518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.159334] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.611s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.181758] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 627.185290] env[68569]: INFO nova.scheduler.client.report [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Deleted allocations for instance 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a [ 627.227876] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 627.227876] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 627.229329] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 627.229329] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 627.229329] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 627.229329] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 627.229677] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 627.229677] env[68569]: DEBUG nova.virt.hardware [None 
req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 627.229761] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 627.230532] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 627.230617] env[68569]: DEBUG nova.virt.hardware [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 627.233040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3e580c-987b-473a-b07e-09fa0da6445a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.253716] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166531, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.255263] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6713bc91-ccf8-48ce-a4d1-52849088db2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.306706] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166532, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.412791] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.413066] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.429739] env[68569]: DEBUG nova.network.neutron [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.668076] env[68569]: INFO nova.compute.claims [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.697444] env[68569]: DEBUG oslo_concurrency.lockutils [None req-250f0863-4d10-465c-b1bf-8dd12c75ce52 tempest-DeleteServersAdminTestJSON-257645121 tempest-DeleteServersAdminTestJSON-257645121-project-admin] Lock "87f6c6d0-4e3d-4608-bd0d-4771bc9c174a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.053s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.744252] env[68569]: DEBUG oslo_vmware.api [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166531, 'name': PowerOnVM_Task, 'duration_secs': 0.528668} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.744252] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.744252] env[68569]: INFO nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Took 10.71 seconds to spawn the instance on the hypervisor. 
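At this point the full spawn sequence for instance e77cc179-1f3d-4095-a491-48df7f79bdb9 has played out across the records above: copy the cached image VMDK into the instance directory, extend the root disk to the flavor size, reconfigure the VM to attach the disk, rename the VM, then power it on. A condensed sketch of that ordering, assuming the same oslo.vmware session as the earlier example; the disk-manager/datacenter/VM references and the attach spec are placeholders, and this is not nova.virt.vmwareapi code:

```python
# Condensed sketch of the spawn ordering traced above (copy -> extend ->
# attach via reconfigure -> rename -> power on), assuming the oslo.vmware
# session from the earlier example. Morefs and specs are placeholders;
# this is not nova.virt.vmwareapi code.
def spawn_from_cached_image(session, vm_ref, disk_mgr, dc_ref,
                            cache_vmdk, instance_vmdk, root_gb,
                            attach_spec, new_name):
    # CopyVirtualDisk_Task: clone the cached image VMDK for this instance.
    session.wait_for_task(session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=cache_vmdk, sourceDatacenter=dc_ref,
        destName=instance_vmdk, destDatacenter=dc_ref))
    # ExtendVirtualDisk_Task: grow the root disk to the flavor size in KB
    # (1 GB root_gb -> 1048576 KB, as in the log).
    session.wait_for_task(session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=instance_vmdk, datacenter=dc_ref,
        newCapacityKb=root_gb * 1024 * 1024, eagerZero=False))
    # ReconfigVM_Task: attach the copied disk to the VM.
    session.wait_for_task(session.invoke_api(
        session.vim, 'ReconfigVM_Task', vm_ref, spec=attach_spec))
    # Rename_Task, then PowerOnVM_Task, completing the spawn.
    session.wait_for_task(session.invoke_api(
        session.vim, 'Rename_Task', vm_ref, newName=new_name))
    session.wait_for_task(session.invoke_api(
        session.vim, 'PowerOnVM_Task', vm_ref))
```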
[ 627.744252] env[68569]: DEBUG nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.745032] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f388dd1-7b40-4e6d-b84e-f932ff12b163 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.804056] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166532, 'name': ReconfigVM_Task, 'duration_secs': 0.535667} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.804056] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28/50b9775c-ddbd-4e8f-a2b8-b08c3028fc28.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.804228] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c03aa1b0-bc63-4f71-9557-278db9cf26fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.813461] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 627.813461] env[68569]: value = "task-3166533" [ 627.813461] env[68569]: _type = "Task" [ 627.813461] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.824139] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166533, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.919561] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 628.016480] env[68569]: DEBUG nova.network.neutron [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.183913] env[68569]: INFO nova.compute.resource_tracker [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Updating resource usage from migration d07aff0d-55b9-4193-83cd-8ad96b7a389d [ 628.277076] env[68569]: INFO nova.compute.manager [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Took 26.05 seconds to build instance. [ 628.313343] env[68569]: DEBUG nova.network.neutron [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updated VIF entry in instance network info cache for port 39d832b9-c266-46b1-9633-7204b1095ba5. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.313602] env[68569]: DEBUG nova.network.neutron [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updating instance_info_cache with network_info: [{"id": "39d832b9-c266-46b1-9633-7204b1095ba5", "address": "fa:16:3e:43:8c:69", "network": {"id": "28a77754-516d-48cc-8a3e-a644b6fe8a04", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-714835425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89a0650f57445abafb21ac1186b86c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d832b9-c2", "ovs_interfaceid": "39d832b9-c266-46b1-9633-7204b1095ba5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.332589] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166533, 'name': Rename_Task, 'duration_secs': 0.412251} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.333328] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 628.334001] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d74a66cf-ab19-4ae2-91f9-f1a93fc15880 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.344458] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 628.344458] env[68569]: value = "task-3166534" [ 628.344458] env[68569]: _type = "Task" [ 628.344458] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.359504] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166534, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.454471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.523018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.523018] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Instance network_info: |[{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 628.523382] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:ec:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e27fd35-1d7b-4358-92d5-4d34da27b992', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17651803-ec04-4913-b621-e685c574de0c', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.531286] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Creating folder: Project (de1ec9d4fc3e45afb009f75ac86d5f05). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.532018] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10a785a8-1e3d-4b4f-9915-2f11f36711bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.549395] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Created folder: Project (de1ec9d4fc3e45afb009f75ac86d5f05) in parent group-v633430. [ 628.549395] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Creating folder: Instances. Parent ref: group-v633481. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.551930] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-184982ce-1712-49e7-8bfc-2666d49c99fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.564725] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Created folder: Instances in parent group-v633481. [ 628.565044] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 628.565251] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 628.565459] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cbda7b3-aae1-420b-8021-0538a0c4f317 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.595251] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.595251] env[68569]: value = "task-3166537" [ 628.595251] env[68569]: _type = "Task" [ 628.595251] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.617375] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166537, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.656731] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc98576-e996-4dd7-89f2-453e33968197 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.665223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804b801a-14ea-45da-b3bc-be4dd4396bda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.705890] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d342889f-1ad3-4321-b855-7403d29d33ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.716450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec601b6e-0345-490c-ba87-33d1e86cf9fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.731419] env[68569]: DEBUG nova.compute.provider_tree [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.780149] env[68569]: DEBUG oslo_concurrency.lockutils [None req-002dac74-d5db-41a4-9591-cd8574ae61a9 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.482s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.816242] env[68569]: DEBUG nova.network.neutron [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Successfully updated port: 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.824171] env[68569]: DEBUG oslo_concurrency.lockutils [req-3d8d733d-59d8-4da9-9d60-516b2c377b17 req-9ee748c0-f899-40c3-8227-906d38a80be4 service nova] Releasing lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.855550] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166534, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.906912] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "303f41c0-7a19-48b2-a072-4f138f6f8156" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.907126] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.110021] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166537, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.234363] env[68569]: DEBUG nova.scheduler.client.report [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.303081] env[68569]: DEBUG nova.network.neutron [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Successfully updated port: eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.322314] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.322314] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.322314] env[68569]: DEBUG nova.network.neutron [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Building network info cache for instance {{(pid=68569) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 629.358766] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166534, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.409588] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 629.617227] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166537, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.741780] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.582s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.743504] env[68569]: INFO nova.compute.manager [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Migrating [ 629.743504] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.743504] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.743852] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.091s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.746680] env[68569]: INFO nova.compute.claims [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.805807] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "refresh_cache-c56e4282-b1ca-42f5-b346-692779475df0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.806192] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired lock "refresh_cache-c56e4282-b1ca-42f5-b346-692779475df0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.806402] env[68569]: DEBUG nova.network.neutron [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.857553] env[68569]: DEBUG oslo_vmware.api [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166534, 'name': PowerOnVM_Task, 'duration_secs': 1.481323} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.857927] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.858651] env[68569]: INFO nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Took 10.29 seconds to spawn the instance on the hypervisor. [ 629.858651] env[68569]: DEBUG nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.859125] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66413242-c473-4070-a028-5a38f357bd2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.905007] env[68569]: DEBUG nova.network.neutron [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.934995] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.112635] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166537, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.220209] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.220209] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.220209] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.220209] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.220446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.221493] env[68569]: INFO nova.compute.manager [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Terminating instance [ 630.251559] env[68569]: DEBUG nova.network.neutron [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": 
[], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.253420] env[68569]: INFO nova.compute.rpcapi [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Automatically selected compute RPC version 6.4 from minimum service version 69 [ 630.253962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.303698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "b2e6de60-b4e5-4030-bca7-355d17fec06d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.306062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.358850] env[68569]: DEBUG nova.network.neutron [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.390782] env[68569]: INFO nova.compute.manager [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Took 26.41 seconds to build instance. 
[ 630.491559] env[68569]: DEBUG nova.compute.manager [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-vif-plugged-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 630.491559] env[68569]: DEBUG oslo_concurrency.lockutils [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] Acquiring lock "39a84212-2e52-4dba-b00c-5689564deaf4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.491907] env[68569]: DEBUG oslo_concurrency.lockutils [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] Lock "39a84212-2e52-4dba-b00c-5689564deaf4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.493043] env[68569]: DEBUG oslo_concurrency.lockutils [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] Lock "39a84212-2e52-4dba-b00c-5689564deaf4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.493385] env[68569]: DEBUG nova.compute.manager [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] No waiting events found dispatching network-vif-plugged-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.493675] env[68569]: WARNING nova.compute.manager [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received unexpected event network-vif-plugged-9b282c4e-170f-4f30-8c96-1a9b64168c47 for instance with vm_state building and task_state spawning. [ 630.494113] env[68569]: DEBUG nova.compute.manager [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 630.494509] env[68569]: DEBUG nova.compute.manager [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing instance network info cache due to event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 630.494934] env[68569]: DEBUG oslo_concurrency.lockutils [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] Acquiring lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.509453] env[68569]: DEBUG nova.compute.manager [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received event network-changed-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 630.510224] env[68569]: DEBUG nova.compute.manager [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing instance network info cache due to event network-changed-17651803-ec04-4913-b621-e685c574de0c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 630.510224] env[68569]: DEBUG oslo_concurrency.lockutils [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.510224] env[68569]: DEBUG oslo_concurrency.lockutils [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.510879] env[68569]: DEBUG nova.network.neutron [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing network info cache for port 17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.611662] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166537, 'name': CreateVM_Task, 'duration_secs': 1.574459} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.612450] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 630.612999] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.613358] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.613914] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 630.614190] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eaebb53-9215-4c0e-9254-2aa3bd381b5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.621487] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 630.621487] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52be2c45-fda5-1149-c3c4-b05da90d1650" [ 630.621487] env[68569]: _type = "Task" [ 630.621487] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.631194] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52be2c45-fda5-1149-c3c4-b05da90d1650, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.729660] env[68569]: DEBUG nova.compute.manager [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 630.729660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.729660] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a86d47-a03d-4e99-ab43-5f2aa1f73f1d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.736606] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 630.737869] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0dab16b-d553-40c7-a6a6-925bae15a6ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.745528] env[68569]: DEBUG oslo_vmware.api [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 630.745528] env[68569]: value = "task-3166538" [ 630.745528] env[68569]: _type = "Task" [ 630.745528] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.761029] env[68569]: DEBUG oslo_vmware.api [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166538, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.768922] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Releasing lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.769295] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Instance network_info: |[{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 630.769673] env[68569]: DEBUG oslo_concurrency.lockutils [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] Acquired lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.769940] env[68569]: DEBUG nova.network.neutron [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.771453] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:c7:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db68bd64-5b56-49af-a075-13dcf85cb2e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b282c4e-170f-4f30-8c96-1a9b64168c47', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.788409] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 
tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Creating folder: Project (5d69fb55927344ad96aa0bf26f8f230e). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 630.792835] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.792946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.793062] env[68569]: DEBUG nova.network.neutron [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.797986] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27b53488-2308-4047-901c-66ddf811c421 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.801698] env[68569]: DEBUG nova.network.neutron [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Updating instance_info_cache with network_info: [{"id": "eacf90a1-83a1-4998-918b-84e4dcf8a668", "address": "fa:16:3e:2e:b9:35", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeacf90a1-83", "ovs_interfaceid": "eacf90a1-83a1-4998-918b-84e4dcf8a668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.808924] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 630.814950] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Created folder: Project (5d69fb55927344ad96aa0bf26f8f230e) in parent group-v633430. [ 630.815076] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Creating folder: Instances. Parent ref: group-v633484. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 630.815402] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6288890-34b1-48e7-bde6-a7321627424d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.827386] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Created folder: Instances in parent group-v633484. [ 630.827565] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 630.827700] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 630.827923] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c774919b-75c3-4190-936b-4e45a91a3ef1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.859271] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.859271] env[68569]: value = "task-3166541" [ 630.859271] env[68569]: _type = "Task" [ 630.859271] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.867137] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166541, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.896217] env[68569]: DEBUG oslo_concurrency.lockutils [None req-287a2ffe-5b75-4100-929e-8175172a995d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.830s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.040785] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.041114] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.124446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "a29854f9-0096-4b01-9350-bfddee84e2c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.125675] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.140669] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52be2c45-fda5-1149-c3c4-b05da90d1650, 'name': SearchDatastore_Task, 'duration_secs': 0.041013} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.141232] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.141232] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.141494] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.141651] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.143096] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 631.143096] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7568dc99-9173-4a22-ae3b-4392f5464bf7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.156112] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.156112] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 631.156530] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9889ad7e-2783-4933-97f2-78c85421bd04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.166368] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 631.166368] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5229f5ab-2c15-9d9b-673f-4fc9e976d401" [ 631.166368] env[68569]: _type = "Task" [ 631.166368] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.178197] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5229f5ab-2c15-9d9b-673f-4fc9e976d401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.191967] env[68569]: DEBUG nova.network.neutron [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updated VIF entry in instance network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 631.192817] env[68569]: DEBUG nova.network.neutron [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.257908] env[68569]: DEBUG oslo_vmware.api [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166538, 'name': PowerOffVM_Task, 
'duration_secs': 0.327139} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.258221] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 631.259119] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 631.259119] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29005ba6-a5cd-4cd0-8464-7cdd95b8d587 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.303597] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Releasing lock "refresh_cache-c56e4282-b1ca-42f5-b346-692779475df0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.304156] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Instance network_info: |[{"id": "eacf90a1-83a1-4998-918b-84e4dcf8a668", "address": "fa:16:3e:2e:b9:35", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeacf90a1-83", "ovs_interfaceid": "eacf90a1-83a1-4998-918b-84e4dcf8a668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.305708] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:b9:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'eacf90a1-83a1-4998-918b-84e4dcf8a668', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.316599] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating folder: Project (ff829a2a7962492b946a99d623c337ac). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.322448] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2abec583-8af8-47fc-ac20-90e6ff2a671b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.340193] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Created folder: Project (ff829a2a7962492b946a99d623c337ac) in parent group-v633430. [ 631.340486] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating folder: Instances. Parent ref: group-v633487. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.340704] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-706de00d-4e72-4dee-98fd-27d20c5140f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.352357] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.355493] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Created folder: Instances in parent group-v633487. [ 631.356098] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.356518] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.357204] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76c790de-4fba-4b87-9efe-cff32d116a07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.382964] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b0f6a2-2e36-4c9f-9fad-eb6af9c37fa1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.397713] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166541, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.403886] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.403886] env[68569]: value = "task-3166545" [ 631.403886] env[68569]: _type = "Task" [ 631.403886] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.405250] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0f48f1-1bf5-4958-b374-3d33295057be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.420296] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166545, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.452399] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b7c968-f6eb-4535-9b1e-709b8c6604b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.466240] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1591cf95-2df0-412c-872c-b4bbbcfe5096 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.485197] env[68569]: DEBUG nova.compute.provider_tree [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.548671] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.555356] env[68569]: DEBUG nova.network.neutron [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updated VIF entry in instance network info cache for port 17651803-ec04-4913-b621-e685c574de0c. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 631.555356] env[68569]: DEBUG nova.network.neutron [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.681229] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5229f5ab-2c15-9d9b-673f-4fc9e976d401, 'name': SearchDatastore_Task, 'duration_secs': 0.015062} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.681229] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83aa907b-bb96-4e3a-85f0-178d346d71cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.692352] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 631.692352] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2252c-f712-94b7-9e2e-d6f29ed218e2" [ 631.692352] env[68569]: _type = "Task" [ 631.692352] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.698577] env[68569]: DEBUG oslo_concurrency.lockutils [req-b528f192-a478-49d7-a633-2ad7831ee239 req-1d91a07f-cf7c-4493-88a7-affd84f8817e service nova] Releasing lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.704271] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2252c-f712-94b7-9e2e-d6f29ed218e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.748218] env[68569]: DEBUG nova.network.neutron [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Updating instance_info_cache with network_info: [{"id": "a41c265a-33e8-416e-bf15-87c720e60d25", "address": "fa:16:3e:d2:db:da", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa41c265a-33", "ovs_interfaceid": "a41c265a-33e8-416e-bf15-87c720e60d25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.895382] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166541, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.919138] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166545, 'name': CreateVM_Task, 'duration_secs': 0.407616} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.922425] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 631.922425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.922425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.922425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 631.922425] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b591fb3a-d023-4649-a304-46d39eeae940 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.927234] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 631.927234] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52555c02-05a4-a9a2-2de6-f98a0d12feb4" [ 631.927234] env[68569]: _type = "Task" [ 631.927234] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.940685] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52555c02-05a4-a9a2-2de6-f98a0d12feb4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.992170] env[68569]: DEBUG nova.scheduler.client.report [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.063611] env[68569]: DEBUG oslo_concurrency.lockutils [req-f4d28ffe-fd7e-4d0f-b993-1f194a439bbd req-a42882c5-44f8-4d90-ba92-2a5d0121720d service nova] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.075748] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.215059] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2252c-f712-94b7-9e2e-d6f29ed218e2, 'name': SearchDatastore_Task, 'duration_secs': 0.0407} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.215561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.218236] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ad207187-634f-4e7f-9809-eb3f742ddeec/ad207187-634f-4e7f-9809-eb3f742ddeec.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.218236] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78886ef0-27b2-4702-9e26-860d962fe5d8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.228538] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 632.228538] env[68569]: value = "task-3166546" [ 632.228538] env[68569]: _type = "Task" [ 632.228538] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.239352] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166546, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.252309] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "refresh_cache-925400c4-3b33-4f4a-9f63-3ceec06cf0b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.398640] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquiring lock "2cde3729-1be6-42c5-891f-42a7a8bff267" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.398921] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.404861] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166541, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.444968] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52555c02-05a4-a9a2-2de6-f98a0d12feb4, 'name': SearchDatastore_Task, 'duration_secs': 0.036878} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.445614] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.445614] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.445916] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.446907] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.447220] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.447594] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d01ef76c-4a51-4ff5-a479-c7b8b7f51824 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.459914] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.459914] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 632.460147] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8c307bb-793f-41be-b248-077612a5a3d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.470946] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 632.470946] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bc9b14-135e-1513-6597-fe0c82edaefc" [ 632.470946] env[68569]: _type = "Task" [ 632.470946] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.486111] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bc9b14-135e-1513-6597-fe0c82edaefc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.498033] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.754s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.499919] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 632.505037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.967s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.509527] env[68569]: INFO nova.compute.claims [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.606571] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "7129a57f-e639-49ae-96a9-3c1d966034a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.606856] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.606916] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "7129a57f-e639-49ae-96a9-3c1d966034a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.608070] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.608303] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.610405] env[68569]: INFO nova.compute.manager [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 
7129a57f-e639-49ae-96a9-3c1d966034a8] Terminating instance [ 632.745595] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166546, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.751917] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 632.752761] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 632.752761] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Deleting the datastore file [datastore1] 26625edb-06ca-48cc-aaf1-3c55a6ea942b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 632.752761] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c8d692e-1727-45a5-afae-b9f704dc17d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.770730] env[68569]: DEBUG oslo_vmware.api [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for the task: (returnval){ [ 632.770730] env[68569]: value = "task-3166547" [ 632.770730] env[68569]: _type = "Task" [ 632.770730] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.788222] env[68569]: DEBUG oslo_vmware.api [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.904606] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166541, 'name': CreateVM_Task, 'duration_secs': 1.918012} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.904859] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.905554] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.905688] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.906019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.906995] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2abb897-0f77-4700-865b-c6c836deb081 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.918019] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 632.918019] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0fe56-8b83-353f-2c6b-1361c048f897" [ 632.918019] env[68569]: _type = "Task" [ 632.918019] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.934444] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0fe56-8b83-353f-2c6b-1361c048f897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.985585] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bc9b14-135e-1513-6597-fe0c82edaefc, 'name': SearchDatastore_Task, 'duration_secs': 0.020844} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.986435] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa3a4fdb-ca0e-4dce-92dd-8632feacfdd8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.996683] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 632.996683] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52689509-e3d0-b5b0-809a-97362bce827b" [ 632.996683] env[68569]: _type = "Task" [ 632.996683] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.009024] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52689509-e3d0-b5b0-809a-97362bce827b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.014542] env[68569]: DEBUG nova.compute.utils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.018639] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 633.018927] env[68569]: DEBUG nova.network.neutron [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 633.088071] env[68569]: DEBUG nova.policy [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57ef72430e2043b6bf41727a18993460', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26331f741df845d5b657d7268c1c9131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.118948] env[68569]: DEBUG nova.compute.manager [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 633.119229] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.120109] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4894b092-8c75-40de-8d53-42e3abc39f6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.129555] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 633.129555] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c88908b-a1ea-42f7-bcbe-1b3509bd0cdc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.139936] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 633.139936] env[68569]: value = "task-3166548" [ 633.139936] env[68569]: _type = "Task" [ 633.139936] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.150492] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.240740] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166546, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633085} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.241047] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ad207187-634f-4e7f-9809-eb3f742ddeec/ad207187-634f-4e7f-9809-eb3f742ddeec.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 633.241297] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 633.241469] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9faa2b73-15b5-4ed2-92f1-6ec2c842f77e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.251320] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 633.251320] env[68569]: value = "task-3166549" [ 633.251320] env[68569]: _type = "Task" [ 633.251320] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.265892] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166549, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.286133] env[68569]: DEBUG oslo_vmware.api [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Task: {'id': task-3166547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453048} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.286546] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 633.287141] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 633.287254] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.287489] env[68569]: INFO nova.compute.manager [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Took 2.56 seconds to destroy the instance on the hypervisor. [ 633.288532] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.288532] env[68569]: DEBUG nova.compute.manager [-] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 633.288532] env[68569]: DEBUG nova.network.neutron [-] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.428232] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.429127] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.435041] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0fe56-8b83-353f-2c6b-1361c048f897, 'name': SearchDatastore_Task, 'duration_secs': 0.012043} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.437786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.439533] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.439533] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.439679] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.439790] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.440621] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a814c49-ed06-4cb9-bc07-988cbb731709 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.457976] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.458286] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 633.459066] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d802392-7273-4c90-bb8c-7fb08c0e4e19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.464991] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 633.464991] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d763be-36e8-ceeb-7d66-99949fe8fea3" [ 633.464991] env[68569]: _type = "Task" [ 633.464991] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.476324] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d763be-36e8-ceeb-7d66-99949fe8fea3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.511342] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52689509-e3d0-b5b0-809a-97362bce827b, 'name': SearchDatastore_Task, 'duration_secs': 0.024687} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.511633] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.511889] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c56e4282-b1ca-42f5-b346-692779475df0/c56e4282-b1ca-42f5-b346-692779475df0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 633.516031] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aae5ee49-a219-4c1d-a142-5c387cf7f018 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.522048] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.527654] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 633.527654] env[68569]: value = "task-3166550" [ 633.527654] env[68569]: _type = "Task" [ 633.527654] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.540156] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.651992] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166548, 'name': PowerOffVM_Task, 'duration_secs': 0.225806} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.654906] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.655150] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.655652] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0462231d-842d-4412-bc2e-f061193d782f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.659723] env[68569]: DEBUG nova.network.neutron [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Successfully created port: ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 633.736137] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.736377] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.736561] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Deleting the datastore file [datastore2] 7129a57f-e639-49ae-96a9-3c1d966034a8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.736883] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d3bd37c-1405-4cbf-8e38-542d1e22d085 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.749945] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for the task: (returnval){ [ 633.749945] env[68569]: value = "task-3166552" [ 633.749945] env[68569]: _type = "Task" [ 633.749945] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.769446] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166549, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122365} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.772986] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166552, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.774476] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 633.775733] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148d2621-7f1a-4363-8ff0-55657a7760af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.803485] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] ad207187-634f-4e7f-9809-eb3f742ddeec/ad207187-634f-4e7f-9809-eb3f742ddeec.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 633.810730] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a24c5ef-8861-4582-ac60-0699cb947547 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.838317] env[68569]: DEBUG nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Received event network-vif-plugged-eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 633.838317] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Acquiring lock "c56e4282-b1ca-42f5-b346-692779475df0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.838317] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Lock "c56e4282-b1ca-42f5-b346-692779475df0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.838317] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Lock "c56e4282-b1ca-42f5-b346-692779475df0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.838317] env[68569]: DEBUG nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] No waiting events found dispatching network-vif-plugged-eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 633.838545] env[68569]: WARNING 
nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Received unexpected event network-vif-plugged-eacf90a1-83a1-4998-918b-84e4dcf8a668 for instance with vm_state building and task_state spawning. [ 633.838973] env[68569]: DEBUG nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Received event network-changed-eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 633.838973] env[68569]: DEBUG nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Refreshing instance network info cache due to event network-changed-eacf90a1-83a1-4998-918b-84e4dcf8a668. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 633.838973] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Acquiring lock "refresh_cache-c56e4282-b1ca-42f5-b346-692779475df0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.839218] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Acquired lock "refresh_cache-c56e4282-b1ca-42f5-b346-692779475df0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.839301] env[68569]: DEBUG nova.network.neutron [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Refreshing network info cache for port eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 633.842383] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 633.842383] env[68569]: value = "task-3166553" [ 633.842383] env[68569]: _type = "Task" [ 633.842383] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.856266] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166553, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.982900] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d763be-36e8-ceeb-7d66-99949fe8fea3, 'name': SearchDatastore_Task, 'duration_secs': 0.045901} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.986577] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b6c4170-1091-4941-a9c6-e37eb867f876 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.996277] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 633.996277] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52067f35-d112-5a6b-0656-f4f973a6573b" [ 633.996277] env[68569]: _type = "Task" [ 633.996277] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.004866] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52067f35-d112-5a6b-0656-f4f973a6573b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.043035] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdaba13-d44c-4bed-95a0-f6559eb15669 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.057752] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166550, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.059416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aadc70-fa15-4b01-a98d-25fccf560364 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.095685] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f9b016-75b3-4c63-9610-2ae4c7330844 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.107026] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da18c61-f793-45d2-aee7-87056325c3ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.126082] env[68569]: DEBUG nova.compute.provider_tree [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.246971] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "7c887df0-4358-46c5-9682-0d4122e96d10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.247293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "7c887df0-4358-46c5-9682-0d4122e96d10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.262968] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.360363] env[68569]: DEBUG nova.network.neutron [-] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.363376] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166553, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.515685] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52067f35-d112-5a6b-0656-f4f973a6573b, 'name': SearchDatastore_Task, 'duration_secs': 0.0286} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.515685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.515685] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 39a84212-2e52-4dba-b00c-5689564deaf4/39a84212-2e52-4dba-b00c-5689564deaf4.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.515685] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-756e1718-37dd-4ca5-b41d-f57b289b3f7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.525430] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 634.525430] env[68569]: value = "task-3166554" [ 634.525430] env[68569]: _type = "Task" [ 634.525430] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.536427] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.538772] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.548501] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.918023} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.551389] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c56e4282-b1ca-42f5-b346-692779475df0/c56e4282-b1ca-42f5-b346-692779475df0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 634.551389] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 634.551562] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e37a4401-32f4-4a65-a1a6-f359bbef0c61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.558563] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 634.558563] env[68569]: value = "task-3166555" [ 634.558563] env[68569]: _type = "Task" [ 634.558563] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.567539] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 634.567778] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.567950] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 634.568117] env[68569]: DEBUG nova.virt.hardware [None 
req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.568261] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.568403] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 634.568609] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 634.568762] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 634.568975] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 634.569098] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 634.569455] env[68569]: DEBUG nova.virt.hardware [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 634.570341] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11249158-d563-4eef-b93e-bfaa90cc9fa7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.576433] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166555, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.582620] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537dcfec-2f52-4d11-b4b6-525856763046 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.634193] env[68569]: DEBUG nova.scheduler.client.report [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.763095] env[68569]: DEBUG oslo_vmware.api [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Task: {'id': task-3166552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528872} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.763562] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.764021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.764348] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.765274] env[68569]: INFO nova.compute.manager [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Took 1.65 seconds to destroy the instance on the hypervisor. [ 634.765274] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.765581] env[68569]: DEBUG nova.compute.manager [-] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.765795] env[68569]: DEBUG nova.network.neutron [-] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.769959] env[68569]: DEBUG nova.network.neutron [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Updated VIF entry in instance network info cache for port eacf90a1-83a1-4998-918b-84e4dcf8a668. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 634.771018] env[68569]: DEBUG nova.network.neutron [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Updating instance_info_cache with network_info: [{"id": "eacf90a1-83a1-4998-918b-84e4dcf8a668", "address": "fa:16:3e:2e:b9:35", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeacf90a1-83", "ovs_interfaceid": "eacf90a1-83a1-4998-918b-84e4dcf8a668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.785059] env[68569]: ERROR nova.compute.manager [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Traceback (most recent call last): [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] yield [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] disk_info = 
self.driver.migrate_disk_and_power_off( [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 634.785059] env[68569]: ERROR nova.compute.manager [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] [ 634.862750] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166553, 'name': ReconfigVM_Task, 'duration_secs': 0.564988} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.865295] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Reconfigured VM instance instance-00000010 to attach disk [datastore1] ad207187-634f-4e7f-9809-eb3f742ddeec/ad207187-634f-4e7f-9809-eb3f742ddeec.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 634.869020] env[68569]: INFO nova.compute.manager [-] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Took 1.58 seconds to deallocate network for instance. [ 634.869020] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac193e3f-0807-4fe4-ad10-76e049c02582 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.884575] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 634.884575] env[68569]: value = "task-3166556" [ 634.884575] env[68569]: _type = "Task" [ 634.884575] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.895812] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166556, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.038211] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166554, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.072406] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093295} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.072699] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 635.073563] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0067f68a-5c7a-4677-9ef0-0e67070f3f34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.101392] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] c56e4282-b1ca-42f5-b346-692779475df0/c56e4282-b1ca-42f5-b346-692779475df0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 635.101725] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16fd26c6-f5f6-4668-87a2-30f8a22a05f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.125902] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 635.125902] env[68569]: value = "task-3166557" [ 635.125902] env[68569]: _type = "Task" [ 635.125902] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.138024] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166557, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.139309] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.140293] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.142438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.916s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.143961] env[68569]: INFO nova.compute.claims [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.273631] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Releasing lock "refresh_cache-c56e4282-b1ca-42f5-b346-692779475df0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.275070] env[68569]: DEBUG nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Received event network-changed-b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 635.275070] env[68569]: DEBUG nova.compute.manager [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Refreshing instance network info cache due to event network-changed-b7d8aded-6354-4a88-941e-005173bada5f. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 635.275070] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Acquiring lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.275070] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Acquired lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.275070] env[68569]: DEBUG nova.network.neutron [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Refreshing network info cache for port b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.313819] env[68569]: INFO nova.compute.manager [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration d07aff0d-55b9-4193-83cd-8ad96b7a389d for instance [ 635.344297] env[68569]: DEBUG nova.scheduler.client.report [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 39}}, 'project_id': 'fb7d044e2a2e4568b5c8c922b17a81ce', 'user_id': '48f0153c75da4790905b1f734eb447e0', 'consumer_generation': 1} on consumer 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 635.381677] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.395729] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166556, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.541832] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791345} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.542139] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 39a84212-2e52-4dba-b00c-5689564deaf4/39a84212-2e52-4dba-b00c-5689564deaf4.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.542359] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.542615] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-878b2d6a-b3b4-4676-b20a-1e5448f12260 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.553015] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 635.553015] env[68569]: value = "task-3166558" [ 635.553015] env[68569]: _type = "Task" [ 635.553015] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.565650] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166558, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.638443] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166557, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.646962] env[68569]: DEBUG nova.network.neutron [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Successfully updated port: ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 635.654346] env[68569]: DEBUG nova.compute.utils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.658093] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 635.658273] env[68569]: DEBUG nova.network.neutron [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 635.719026] env[68569]: DEBUG nova.policy [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'facbf3a2c92646b0b6d4974c03f6a3d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff829a2a7962492b946a99d623c337ac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 635.900495] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166556, 'name': Rename_Task, 'duration_secs': 0.590266} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.900829] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 635.901142] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c74127ca-574c-4d32-9521-53e96cf55c5b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.912537] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 635.912537] env[68569]: value = "task-3166559" [ 635.912537] env[68569]: _type = "Task" [ 635.912537] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.923667] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166559, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.954676] env[68569]: DEBUG nova.network.neutron [-] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.071240] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.071240] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.071240] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096656} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.071240] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.071417] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607a105b-eb75-45f6-9868-d9bbfdb5a37d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.095151] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 39a84212-2e52-4dba-b00c-5689564deaf4/39a84212-2e52-4dba-b00c-5689564deaf4.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.096622] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a3bc96f-430f-4425-b511-a5ca6a4ad678 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.120087] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "78f486aa-80f4-4d43-bd00-cc6206517a72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.120431] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.128023] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 636.128023] env[68569]: value = "task-3166560" [ 636.128023] env[68569]: _type = "Task" [ 636.128023] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.142368] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166560, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.146477] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166557, 'name': ReconfigVM_Task, 'duration_secs': 0.836455} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.146796] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Reconfigured VM instance instance-00000012 to attach disk [datastore2] c56e4282-b1ca-42f5-b346-692779475df0/c56e4282-b1ca-42f5-b346-692779475df0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.148965] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9a9088d-cbb1-4c3a-8a09-e615f5267044 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.159972] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "refresh_cache-40b768c1-9007-4f78-a90f-61b2ac64553f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.159972] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired lock "refresh_cache-40b768c1-9007-4f78-a90f-61b2ac64553f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.159972] env[68569]: DEBUG nova.network.neutron [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 
tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.161797] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.165094] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 636.165094] env[68569]: value = "task-3166561" [ 636.165094] env[68569]: _type = "Task" [ 636.165094] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.180433] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166561, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.284051] env[68569]: DEBUG nova.network.neutron [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Successfully created port: 9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.287179] env[68569]: DEBUG nova.network.neutron [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Updated VIF entry in instance network info cache for port b7d8aded-6354-4a88-941e-005173bada5f. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.287537] env[68569]: DEBUG nova.network.neutron [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Updating instance_info_cache with network_info: [{"id": "b7d8aded-6354-4a88-941e-005173bada5f", "address": "fa:16:3e:ef:31:49", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d8aded-63", "ovs_interfaceid": "b7d8aded-6354-4a88-941e-005173bada5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.426123] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166559, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.459535] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Received event network-vif-deleted-cacf394f-f702-4d94-8010-c6f2d2e14123 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 636.459535] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Received event network-changed-39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 636.459535] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Refreshing instance network info cache due to event network-changed-39d832b9-c266-46b1-9633-7204b1095ba5. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 636.460155] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Acquiring lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.460155] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Acquired lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.460155] env[68569]: DEBUG nova.network.neutron [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Refreshing network info cache for port 39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.462143] env[68569]: INFO nova.compute.manager [-] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Took 1.70 seconds to deallocate network for instance. [ 636.650190] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166560, 'name': ReconfigVM_Task, 'duration_secs': 0.428422} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.650190] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 39a84212-2e52-4dba-b00c-5689564deaf4/39a84212-2e52-4dba-b00c-5689564deaf4.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.650765] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60f3b474-90f2-45f7-bd28-d317bb4be7ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.661779] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 636.661779] env[68569]: value = "task-3166562" [ 636.661779] env[68569]: _type = "Task" [ 636.661779] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.674401] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166562, 'name': Rename_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.688036] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166561, 'name': Rename_Task, 'duration_secs': 0.156219} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.688036] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 636.688036] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db509583-53a2-4c42-8cb2-74e397dc9cde {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.695136] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 636.695136] env[68569]: value = "task-3166563" [ 636.695136] env[68569]: _type = "Task" [ 636.695136] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.708346] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166563, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.712287] env[68569]: DEBUG nova.network.neutron [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.716074] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd2128e-8a2c-4b2f-bba7-68d0de3e0ac9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.724543] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08298ab-7b41-45aa-bfb7-4f9725865d0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.758973] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fafab6-4a3b-4863-be43-2c174b6c932c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.767984] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3f6431-858f-417b-8451-3536e63ae331 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.782058] env[68569]: DEBUG nova.compute.provider_tree [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.792057] env[68569]: DEBUG oslo_concurrency.lockutils [req-9f1df664-59da-4995-9a28-19edcc736235 req-f2198831-062a-49e1-b5ea-5d9adc68265c service nova] Releasing lock "refresh_cache-e77cc179-1f3d-4095-a491-48df7f79bdb9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.884469] env[68569]: DEBUG nova.network.neutron [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Updating instance_info_cache with network_info: [{"id": "ce8c8a30-9a8b-4015-a5db-09a460d5765b", "address": "fa:16:3e:9d:df:96", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8c8a30-9a", "ovs_interfaceid": "ce8c8a30-9a8b-4015-a5db-09a460d5765b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.924231] env[68569]: DEBUG oslo_vmware.api [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 
tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166559, 'name': PowerOnVM_Task, 'duration_secs': 0.608133} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.924509] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 636.924705] env[68569]: INFO nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Took 14.88 seconds to spawn the instance on the hypervisor. [ 636.924951] env[68569]: DEBUG nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 636.925795] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a858342f-3903-4644-994f-82340769aafe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.942769] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.971447] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.174624] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166562, 'name': Rename_Task, 'duration_secs': 0.401781} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.174991] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 637.175191] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f59b130-600a-4e9f-8d2d-1d194192ab9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.178888] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.188054] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 637.188054] env[68569]: value = "task-3166564" [ 637.188054] env[68569]: _type = "Task" [ 637.188054] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.201722] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.202849] env[68569]: DEBUG nova.network.neutron [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updated VIF entry in instance network info cache for port 39d832b9-c266-46b1-9633-7204b1095ba5. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 637.203490] env[68569]: DEBUG nova.network.neutron [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updating instance_info_cache with network_info: [{"id": "39d832b9-c266-46b1-9633-7204b1095ba5", "address": "fa:16:3e:43:8c:69", "network": {"id": "28a77754-516d-48cc-8a3e-a644b6fe8a04", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-714835425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89a0650f57445abafb21ac1186b86c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d832b9-c2", "ovs_interfaceid": "39d832b9-c266-46b1-9633-7204b1095ba5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.220699] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166563, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.224054] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.224381] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.224584] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.224800] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.225015] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.225211] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.225450] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.225632] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.225823] env[68569]: 
DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.226027] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.226301] env[68569]: DEBUG nova.virt.hardware [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.227515] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b2f52f-1a28-465d-ad4e-ee1dd427d3f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.236772] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60791fb8-ecb8-4b67-83f8-3a7049adf062 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.287788] env[68569]: DEBUG nova.scheduler.client.report [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.386914] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Releasing lock "refresh_cache-40b768c1-9007-4f78-a90f-61b2ac64553f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.387273] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Instance network_info: |[{"id": "ce8c8a30-9a8b-4015-a5db-09a460d5765b", "address": "fa:16:3e:9d:df:96", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8c8a30-9a", "ovs_interfaceid": "ce8c8a30-9a8b-4015-a5db-09a460d5765b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 637.387685] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:df:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce8c8a30-9a8b-4015-a5db-09a460d5765b', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.396510] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 637.396510] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 637.396510] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b86a1d1d-2281-4f3a-80f1-7da120d1c793 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.417324] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.417324] env[68569]: value = "task-3166565" [ 637.417324] env[68569]: _type = "Task" [ 637.417324] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.426312] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166565, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.455821] env[68569]: INFO nova.compute.manager [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Took 32.42 seconds to build instance. [ 637.696684] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166564, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.710097] env[68569]: DEBUG oslo_vmware.api [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166563, 'name': PowerOnVM_Task, 'duration_secs': 0.755285} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.710600] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Releasing lock "refresh_cache-50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.710843] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Received event network-vif-deleted-05b63b18-ef8d-4346-992c-880e73eb22d9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 637.711015] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Received event network-vif-plugged-ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 637.711218] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Acquiring lock "40b768c1-9007-4f78-a90f-61b2ac64553f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.711419] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.711574] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.711736] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] No waiting events found dispatching network-vif-plugged-ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 637.711902] env[68569]: WARNING nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Received unexpected event network-vif-plugged-ce8c8a30-9a8b-4015-a5db-09a460d5765b for instance with vm_state building and task_state spawning. 
[ 637.712075] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Received event network-changed-ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 637.712229] env[68569]: DEBUG nova.compute.manager [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Refreshing instance network info cache due to event network-changed-ce8c8a30-9a8b-4015-a5db-09a460d5765b. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 637.712419] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Acquiring lock "refresh_cache-40b768c1-9007-4f78-a90f-61b2ac64553f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.712556] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Acquired lock "refresh_cache-40b768c1-9007-4f78-a90f-61b2ac64553f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.712715] env[68569]: DEBUG nova.network.neutron [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Refreshing network info cache for port ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 637.713811] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 637.714692] env[68569]: INFO nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Took 10.53 seconds to spawn the instance on the hypervisor. 
[ 637.714692] env[68569]: DEBUG nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.715012] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ea3762-e034-4f2b-96c9-46f87243e424 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.793032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.793657] env[68569]: DEBUG nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 637.796387] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.408s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.796612] env[68569]: DEBUG nova.objects.instance [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lazy-loading 'resources' on Instance uuid d6c45731-d76a-46cf-9b7d-be035a200948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 637.927962] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166565, 'name': CreateVM_Task, 'duration_secs': 0.392205} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.928499] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 637.929813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.930134] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.930717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 637.931143] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2df7ee-6d50-4d71-99ba-99a2ca3e9093 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.937848] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 637.937848] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a351da-110c-d749-a250-ac0d0c8a5042" [ 637.937848] env[68569]: _type = "Task" [ 637.937848] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.944168] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a351da-110c-d749-a250-ac0d0c8a5042, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.957896] env[68569]: DEBUG oslo_concurrency.lockutils [None req-581af401-ac26-4fe3-9fd9-e3f707abe0ec tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.086s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.023987] env[68569]: DEBUG nova.network.neutron [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Successfully updated port: 9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.200666] env[68569]: DEBUG oslo_vmware.api [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166564, 'name': PowerOnVM_Task, 'duration_secs': 0.67389} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.201045] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.201236] env[68569]: INFO nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Took 13.61 seconds to spawn the instance on the hypervisor. [ 638.201505] env[68569]: DEBUG nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.202669] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab54390-4c34-4fa5-a622-b17570ecb601 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.233949] env[68569]: INFO nova.compute.manager [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Took 28.46 seconds to build instance. 
[ 638.300405] env[68569]: DEBUG nova.compute.utils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.306173] env[68569]: DEBUG nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 638.449596] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a351da-110c-d749-a250-ac0d0c8a5042, 'name': SearchDatastore_Task, 'duration_secs': 0.010257} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.452192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.452431] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.452657] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.452798] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.453085] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.455339] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f61f23c-dc37-4010-a102-2594502b9914 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.460190] env[68569]: DEBUG 
nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 638.468634] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.468821] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.469597] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7d15a3c-0ee1-4b37-b7c2-b236cb4e5cac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.480186] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 638.480186] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52165050-d3aa-070f-e9ba-1e6ece048af5" [ 638.480186] env[68569]: _type = "Task" [ 638.480186] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.489123] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52165050-d3aa-070f-e9ba-1e6ece048af5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.495560] env[68569]: DEBUG nova.network.neutron [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Updated VIF entry in instance network info cache for port ce8c8a30-9a8b-4015-a5db-09a460d5765b. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 638.495983] env[68569]: DEBUG nova.network.neutron [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Updating instance_info_cache with network_info: [{"id": "ce8c8a30-9a8b-4015-a5db-09a460d5765b", "address": "fa:16:3e:9d:df:96", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8c8a30-9a", "ovs_interfaceid": "ce8c8a30-9a8b-4015-a5db-09a460d5765b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.528270] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "refresh_cache-c9264123-ab19-40d5-959a-791b8966d2f6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.528436] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired lock "refresh_cache-c9264123-ab19-40d5-959a-791b8966d2f6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.529025] env[68569]: DEBUG nova.network.neutron [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.725652] env[68569]: INFO nova.compute.manager [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Took 33.32 seconds to build instance. 
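The network_info payload recorded in the instance_info_cache update above is a JSON-serializable list of VIF dicts. Below is a minimal, self-contained sketch of reading the fields that matter most when inspecting these entries (port id, MAC, devname, MTU, fixed IPs); the field names are copied from the entry logged above, the sample is abridged, and this is illustrative code, not Nova's own network model helpers.

```python
# Illustrative sketch: pull common fields out of a network_info entry like the
# one logged above (abridged from the cache update for port ce8c8a30-9a8b...).
import json

sample = json.loads("""
[{"id": "ce8c8a30-9a8b-4015-a5db-09a460d5765b",
  "address": "fa:16:3e:9d:df:96",
  "devname": "tapce8c8a30-9a",
  "type": "ovs",
  "details": {"segmentation_id": 400},
  "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f",
              "bridge": "br-int",
              "label": "shared",
              "meta": {"mtu": 8950},
              "subnets": [{"cidr": "192.168.233.0/24",
                           "ips": [{"address": "192.168.233.204",
                                    "type": "fixed"}]}]}}]
""")

for vif in sample:
    # Collect the fixed IPs across all subnets attached to this VIF.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"],
          vif["network"]["meta"]["mtu"], fixed_ips)
```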
[ 638.737481] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02bfde68-a566-4732-8e01-c26f8495a1f4 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c56e4282-b1ca-42f5-b346-692779475df0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.974s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.800973] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2946d11a-d569-4a12-b66e-ac991a052740 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.806577] env[68569]: DEBUG nova.compute.manager [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Received event network-vif-plugged-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 638.806736] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] Acquiring lock "c9264123-ab19-40d5-959a-791b8966d2f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.807070] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] Lock "c9264123-ab19-40d5-959a-791b8966d2f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.807246] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] Lock "c9264123-ab19-40d5-959a-791b8966d2f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.807460] env[68569]: DEBUG nova.compute.manager [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] No waiting events found dispatching network-vif-plugged-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 638.807631] env[68569]: WARNING nova.compute.manager [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Received unexpected event network-vif-plugged-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f for instance with vm_state building and task_state spawning. 
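The lock/dispatch sequence above (acquire the per-instance "&lt;uuid&gt;-events" lock, pop any registered waiter, and warn when none exists) is the compute manager's handling of external Neutron events such as network-vif-plugged. The sketch below is a minimal stand-alone model of that pattern using a plain dict of waiters and standard threading primitives; it is an assumption-level illustration, not Nova's actual InstanceEvents implementation, and the class and method names are invented for the example.

```python
# Illustrative model of the event-dispatch pattern logged above: a lock guards
# a table of pending per-instance event waiters; an incoming event either
# completes a registered waiter or is reported as unexpected.
import threading


class InstanceEventsModel:
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" locks
        self._waiters = {}              # (instance_uuid, event_name) -> Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the operation."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Dispatch an incoming event; mirrors 'No waiting events found...'."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        waiter.set()
        return True


events = InstanceEventsModel()
# No waiter was registered, so this mirrors the WARNING record in the log above.
events.pop_event("c9264123-ab19-40d5-959a-791b8966d2f6",
                 "network-vif-plugged-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f")
```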
[ 638.807715] env[68569]: DEBUG nova.compute.manager [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Received event network-changed-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 638.808109] env[68569]: DEBUG nova.compute.manager [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Refreshing instance network info cache due to event network-changed-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 638.808109] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] Acquiring lock "refresh_cache-c9264123-ab19-40d5-959a-791b8966d2f6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.808633] env[68569]: DEBUG nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 638.817122] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de897c82-47d6-4257-979f-e67ab7c38dc2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.850466] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f7bb4a-104b-40bb-aa21-c9f294a69854 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.860766] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92109a5a-533e-4552-a627-af751158a958 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.876742] env[68569]: DEBUG nova.compute.provider_tree [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.994147] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52165050-d3aa-070f-e9ba-1e6ece048af5, 'name': SearchDatastore_Task, 'duration_secs': 0.018179} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.994781] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59749d82-94c3-49f0-9d13-52c019d69c75 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.999607] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.001119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "1c8dfb47-df19-4101-8d4e-30889d71d7da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.001323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.001509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "1c8dfb47-df19-4101-8d4e-30889d71d7da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.001690] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.001836] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.003898] env[68569]: INFO nova.compute.manager [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Terminating instance [ 639.007084] env[68569]: DEBUG oslo_concurrency.lockutils [req-0b58b62f-7247-4003-b011-355949a5c39c req-b33657bd-9d6d-4010-92d4-d60795a46893 service nova] Releasing lock "refresh_cache-40b768c1-9007-4f78-a90f-61b2ac64553f" {{(pid=68569) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.007419] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 639.007419] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d81798-32e1-d50f-500e-655d49da878c" [ 639.007419] env[68569]: _type = "Task" [ 639.007419] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.016659] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d81798-32e1-d50f-500e-655d49da878c, 'name': SearchDatastore_Task, 'duration_secs': 0.011532} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.017453] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.017732] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 40b768c1-9007-4f78-a90f-61b2ac64553f/40b768c1-9007-4f78-a90f-61b2ac64553f.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.018024] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32b53a12-c4f3-419f-8b4f-fa5c71e5532c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.034672] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 639.034672] env[68569]: value = "task-3166566" [ 639.034672] env[68569]: _type = "Task" [ 639.034672] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.044761] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.071928] env[68569]: DEBUG nova.network.neutron [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.195853] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "912303de-a79d-41b0-ab44-c79e850a4dee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.196494] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "912303de-a79d-41b0-ab44-c79e850a4dee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.229196] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f3b676-9d09-4ec8-94cc-34df5ee12a25 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "39a84212-2e52-4dba-b00c-5689564deaf4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.200s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.243314] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.272832] env[68569]: DEBUG nova.network.neutron [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Updating instance_info_cache with network_info: [{"id": "9fcabc27-ebd3-48dd-bf0d-1829ee0f304f", "address": "fa:16:3e:b9:ae:48", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fcabc27-eb", "ovs_interfaceid": "9fcabc27-ebd3-48dd-bf0d-1829ee0f304f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.380187] env[68569]: DEBUG nova.scheduler.client.report [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.507990] env[68569]: DEBUG nova.compute.manager [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 639.508334] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.509313] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c31244-f498-414c-bdc0-4c3a09159aea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.518522] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 639.518522] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed536d3b-009b-4686-9558-cbed9696f79b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.529809] env[68569]: DEBUG oslo_vmware.api [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 639.529809] env[68569]: value = "task-3166567" [ 639.529809] env[68569]: _type = "Task" [ 639.529809] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.545124] env[68569]: DEBUG oslo_vmware.api [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.551696] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166566, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.706299] env[68569]: DEBUG nova.compute.manager [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received event network-changed-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 639.707240] env[68569]: DEBUG nova.compute.manager [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing instance network info cache due to event network-changed-17651803-ec04-4913-b621-e685c574de0c. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 639.707240] env[68569]: DEBUG oslo_concurrency.lockutils [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.707240] env[68569]: DEBUG oslo_concurrency.lockutils [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.707240] env[68569]: DEBUG nova.network.neutron [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing network info cache for port 17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.737019] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.765600] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.776954] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Releasing lock "refresh_cache-c9264123-ab19-40d5-959a-791b8966d2f6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.776954] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Instance network_info: |[{"id": "9fcabc27-ebd3-48dd-bf0d-1829ee0f304f", "address": "fa:16:3e:b9:ae:48", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fcabc27-eb", "ovs_interfaceid": "9fcabc27-ebd3-48dd-bf0d-1829ee0f304f", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 639.777239] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] Acquired lock "refresh_cache-c9264123-ab19-40d5-959a-791b8966d2f6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.777239] env[68569]: DEBUG nova.network.neutron [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Refreshing network info cache for port 9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.783163] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:ae:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fcabc27-ebd3-48dd-bf0d-1829ee0f304f', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 639.794817] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 639.798412] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 639.798915] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c98acec-4cd3-42f7-a2a6-7c14b497ed58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.819042] env[68569]: DEBUG nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 639.822708] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 639.822708] env[68569]: value = "task-3166568" [ 639.822708] env[68569]: _type = "Task" [ 639.822708] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.831091] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166568, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.862759] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 639.863037] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.863196] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 639.863370] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.863507] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 639.863646] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 639.864333] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 639.864556] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 639.864733] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 
tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 639.864902] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 639.865205] env[68569]: DEBUG nova.virt.hardware [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 639.866111] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878ae494-e886-4fdf-8e35-377bce20d2f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.878136] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3cde51-066f-41cc-89a5-15e297a5b1fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.888022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.891989] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.566s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.891989] env[68569]: DEBUG nova.objects.instance [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lazy-loading 'resources' on Instance uuid 8eb18d79-e164-4e66-83b0-7b40d04c30a8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 639.902867] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 639.909387] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Creating folder: Project (2279caed17d14afc8c2edfda3c459e6a). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 639.910668] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7482fe90-95a3-46ab-8b1c-a3b29cf0ebc4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.916815] env[68569]: INFO nova.scheduler.client.report [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Deleted allocations for instance d6c45731-d76a-46cf-9b7d-be035a200948 [ 639.922555] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Created folder: Project (2279caed17d14afc8c2edfda3c459e6a) in parent group-v633430. [ 639.922763] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Creating folder: Instances. Parent ref: group-v633492. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 639.923015] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79deff53-5519-4e8d-8f1a-83608b89d10b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.931432] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Created folder: Instances in parent group-v633492. [ 639.931651] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 639.931829] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 639.932038] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0102bacd-9cc4-4c1d-b3a2-78476090cd85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.947798] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 639.947798] env[68569]: value = "task-3166571" [ 639.947798] env[68569]: _type = "Task" [ 639.947798] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.959569] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166571, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.040881] env[68569]: DEBUG oslo_vmware.api [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166567, 'name': PowerOffVM_Task, 'duration_secs': 0.356605} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.043117] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 640.043337] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 640.043608] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ad9ee90-5e09-4d2a-b988-1da0cd99e6f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.058132] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566079} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.058132] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 40b768c1-9007-4f78-a90f-61b2ac64553f/40b768c1-9007-4f78-a90f-61b2ac64553f.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.058132] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.058132] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2798f555-0cfe-47ba-be52-b879f14e02c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.065220] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 640.065220] env[68569]: value = "task-3166573" [ 640.065220] env[68569]: _type = "Task" [ 640.065220] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.080091] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166573, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.118140] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 640.118344] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 640.118623] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Deleting the datastore file [datastore2] 1c8dfb47-df19-4101-8d4e-30889d71d7da {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 640.121599] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e68eb46c-f530-4460-a765-679045c362ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.127026] env[68569]: DEBUG oslo_vmware.api [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for the task: (returnval){ [ 640.127026] env[68569]: value = "task-3166574" [ 640.127026] env[68569]: _type = "Task" [ 640.127026] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.139448] env[68569]: DEBUG oslo_vmware.api [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.190710] env[68569]: DEBUG nova.network.neutron [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Updated VIF entry in instance network info cache for port 9fcabc27-ebd3-48dd-bf0d-1829ee0f304f. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 640.190946] env[68569]: DEBUG nova.network.neutron [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Updating instance_info_cache with network_info: [{"id": "9fcabc27-ebd3-48dd-bf0d-1829ee0f304f", "address": "fa:16:3e:b9:ae:48", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fcabc27-eb", "ovs_interfaceid": "9fcabc27-ebd3-48dd-bf0d-1829ee0f304f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.267397] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.333341] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166568, 'name': CreateVM_Task, 'duration_secs': 0.3538} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.333341] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 640.333836] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.334031] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.334883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 640.334883] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f179f5-c3a8-4190-b22e-721de372e309 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.339806] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 640.339806] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c4c8dc-0e28-8f63-dfbe-7be908a08d23" [ 640.339806] env[68569]: _type = "Task" [ 640.339806] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.347969] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c4c8dc-0e28-8f63-dfbe-7be908a08d23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.428864] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63710075-2ebf-4828-899a-03a729ae5fb1 tempest-TenantUsagesTestJSON-1542615870 tempest-TenantUsagesTestJSON-1542615870-project-member] Lock "d6c45731-d76a-46cf-9b7d-be035a200948" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.904s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.462044] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166571, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.575886] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07685} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.576812] env[68569]: DEBUG nova.network.neutron [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updated VIF entry in instance network info cache for port 17651803-ec04-4913-b621-e685c574de0c. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 640.577223] env[68569]: DEBUG nova.network.neutron [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.578684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 640.579273] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda3927b-bd97-41b0-81cb-97b938c423e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.608860] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 
40b768c1-9007-4f78-a90f-61b2ac64553f/40b768c1-9007-4f78-a90f-61b2ac64553f.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 640.612077] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7df2bcec-6c65-4f92-bb4f-4e7b3f3d26ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.637354] env[68569]: DEBUG oslo_vmware.api [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Task: {'id': task-3166574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230091} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.638743] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.638743] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.638743] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.638743] env[68569]: INFO nova.compute.manager [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Took 1.13 seconds to destroy the instance on the hypervisor. [ 640.639026] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.639253] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 640.639253] env[68569]: value = "task-3166575" [ 640.639253] env[68569]: _type = "Task" [ 640.639253] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.639452] env[68569]: DEBUG nova.compute.manager [-] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 640.639548] env[68569]: DEBUG nova.network.neutron [-] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.653811] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166575, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.699411] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb0c9d17-2456-4f56-a2c5-560c815cd892 req-e90bdaff-9352-4696-8911-79181d2df09a service nova] Releasing lock "refresh_cache-c9264123-ab19-40d5-959a-791b8966d2f6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.852558] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c4c8dc-0e28-8f63-dfbe-7be908a08d23, 'name': SearchDatastore_Task, 'duration_secs': 0.013258} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.853008] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.853245] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 640.853758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.853758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.853951] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 
tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 640.856754] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d87f56d4-b8b1-42dc-b9c3-743e50d0d7de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.867032] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 640.867186] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 640.867810] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56a98624-29df-48b9-ac3e-99b763af3179 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.876585] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 640.876585] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52afc430-c068-ef42-fe5b-e893c064e7ce" [ 640.876585] env[68569]: _type = "Task" [ 640.876585] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.890154] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52afc430-c068-ef42-fe5b-e893c064e7ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.922843] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cea1fe-5b51-4310-b3f1-2108082ce738 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.931052] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8a2e53-cc5a-49d1-bf88-14f78e77f8e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.971650] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c609b-e3f4-4360-854a-ea504b55a05a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.981062] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166571, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.983377] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48541a7-8d4e-4090-b6de-5e8ceee8480a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.997471] env[68569]: DEBUG nova.compute.provider_tree [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.082950] env[68569]: DEBUG oslo_concurrency.lockutils [req-1f75452f-dfe6-4f32-b4aa-46a933cedd2d req-0fab77cb-00a8-4b0b-8a94-09d5796377c3 service nova] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.152034] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166575, 'name': ReconfigVM_Task, 'duration_secs': 0.299314} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.152034] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 40b768c1-9007-4f78-a90f-61b2ac64553f/40b768c1-9007-4f78-a90f-61b2ac64553f.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.152034] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-563e4fe6-c9ac-47dc-9ec9-651f221128de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.158311] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 641.158311] env[68569]: value = "task-3166576" [ 641.158311] env[68569]: _type = "Task" [ 641.158311] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.166222] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166576, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.252568] env[68569]: DEBUG nova.compute.manager [req-3c16fed5-b720-4f85-b00f-4f5b480827df req-1e2de7c2-d8a0-4717-a4b2-ae863ae8a29f service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Received event network-vif-deleted-4a7f66da-42d4-4f36-9888-5bb341b4925c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 641.252764] env[68569]: INFO nova.compute.manager [req-3c16fed5-b720-4f85-b00f-4f5b480827df req-1e2de7c2-d8a0-4717-a4b2-ae863ae8a29f service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Neutron deleted interface 4a7f66da-42d4-4f36-9888-5bb341b4925c; detaching it from the instance and deleting it from the info cache [ 641.252990] env[68569]: DEBUG nova.network.neutron [req-3c16fed5-b720-4f85-b00f-4f5b480827df req-1e2de7c2-d8a0-4717-a4b2-ae863ae8a29f service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.393883] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52afc430-c068-ef42-fe5b-e893c064e7ce, 'name': SearchDatastore_Task, 'duration_secs': 0.019007} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.393883] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c2f1fd-cc57-44c0-ab33-fac6420f3792 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.400303] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 641.400303] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5298fd1e-b536-e9e6-83b8-34e96f92e349" [ 641.400303] env[68569]: _type = "Task" [ 641.400303] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.408946] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5298fd1e-b536-e9e6-83b8-34e96f92e349, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.446737] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "6606b921-4f3a-44f5-ae4e-c600f26876fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.447477] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.475759] env[68569]: DEBUG nova.network.neutron [-] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.479382] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166571, 'name': CreateVM_Task, 'duration_secs': 1.359246} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.479382] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.479382] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.479382] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.479382] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.479382] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67b3d4cd-268c-4d7c-976a-5e2193c64eee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.484046] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 641.484046] env[68569]: value = 
"session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521e8746-27ed-233a-947f-399e67c4c6e6" [ 641.484046] env[68569]: _type = "Task" [ 641.484046] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.493754] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521e8746-27ed-233a-947f-399e67c4c6e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.500905] env[68569]: DEBUG nova.scheduler.client.report [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 641.671826] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166576, 'name': Rename_Task, 'duration_secs': 0.149802} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.672191] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 641.672498] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d02f73d3-aa8a-477c-b5b0-508e0b37ded1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.679365] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 641.679365] env[68569]: value = "task-3166577" [ 641.679365] env[68569]: _type = "Task" [ 641.679365] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.687558] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166577, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.758193] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-152b7eab-35c1-41c0-8535-c858b3a379e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.769661] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436a76e9-56c4-428f-b065-47d3c2a7239d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.813867] env[68569]: DEBUG nova.compute.manager [req-3c16fed5-b720-4f85-b00f-4f5b480827df req-1e2de7c2-d8a0-4717-a4b2-ae863ae8a29f service nova] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Detach interface failed, port_id=4a7f66da-42d4-4f36-9888-5bb341b4925c, reason: Instance 1c8dfb47-df19-4101-8d4e-30889d71d7da could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 641.911519] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5298fd1e-b536-e9e6-83b8-34e96f92e349, 'name': SearchDatastore_Task, 'duration_secs': 0.01976} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.911519] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.913483] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c9264123-ab19-40d5-959a-791b8966d2f6/c9264123-ab19-40d5-959a-791b8966d2f6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.913483] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0d2d05c-feb8-4256-8846-cb7653ab74ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.920137] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 641.920137] env[68569]: value = "task-3166578" [ 641.920137] env[68569]: _type = "Task" [ 641.920137] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.928505] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166578, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.981863] env[68569]: INFO nova.compute.manager [-] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Took 1.34 seconds to deallocate network for instance. [ 642.000769] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521e8746-27ed-233a-947f-399e67c4c6e6, 'name': SearchDatastore_Task, 'duration_secs': 0.031632} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.000769] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.000769] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.000769] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.001106] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.001106] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.001106] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-799997f6-29ce-4f43-8e8a-0b9115633f4c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.009220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.011757] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 
tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.483s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.011757] env[68569]: DEBUG nova.objects.instance [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lazy-loading 'resources' on Instance uuid 7693ef68-d7e5-4899-9615-9f2a1dd0bce8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 642.013155] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.013467] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.018060] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b160438e-4c4f-442e-a59f-54ed3c46ffce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.025473] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 642.025473] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523f1fd5-1015-a502-7497-c393c3cd9a60" [ 642.025473] env[68569]: _type = "Task" [ 642.025473] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.034041] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523f1fd5-1015-a502-7497-c393c3cd9a60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.041233] env[68569]: INFO nova.scheduler.client.report [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Deleted allocations for instance 8eb18d79-e164-4e66-83b0-7b40d04c30a8 [ 642.193404] env[68569]: DEBUG oslo_vmware.api [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166577, 'name': PowerOnVM_Task, 'duration_secs': 0.471733} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.193723] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 642.193909] env[68569]: INFO nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Took 7.66 seconds to spawn the instance on the hypervisor. [ 642.195624] env[68569]: DEBUG nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 642.195624] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54004bfb-b6fd-4e5f-9e77-8d53e06d462d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.433067] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166578, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.493846] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.538333] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523f1fd5-1015-a502-7497-c393c3cd9a60, 'name': SearchDatastore_Task, 'duration_secs': 0.011996} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.539270] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2267bbf-ff7f-41de-9c01-ef980da27b06 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.548880] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 642.548880] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520abc24-7e57-5c8c-ceea-87d89a23f262" [ 642.548880] env[68569]: _type = "Task" [ 642.548880] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.571061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8d167ab-4a5a-4b02-a189-8d24c658dd47 tempest-ServerDiagnosticsTest-161012758 tempest-ServerDiagnosticsTest-161012758-project-member] Lock "8eb18d79-e164-4e66-83b0-7b40d04c30a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.955s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.580799] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520abc24-7e57-5c8c-ceea-87d89a23f262, 'name': SearchDatastore_Task, 'duration_secs': 0.016941} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.582140] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.582643] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.582755] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5035ed9-4c85-433e-8296-3a3f4125a42c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.591080] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 642.591080] env[68569]: value = "task-3166579" [ 642.591080] env[68569]: _type = "Task" [ 642.591080] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.600841] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166579, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.716520] env[68569]: INFO nova.compute.manager [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Took 32.09 seconds to build instance. 
[ 642.942772] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659452} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.943226] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c9264123-ab19-40d5-959a-791b8966d2f6/c9264123-ab19-40d5-959a-791b8966d2f6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 642.943529] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.943892] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6530229c-e90c-48c6-b1a8-b182c7bf723e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.953553] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 642.953553] env[68569]: value = "task-3166580" [ 642.953553] env[68569]: _type = "Task" [ 642.953553] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.963733] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.057707] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed163c5a-3d2e-4ce5-a8bc-40af7ee61621 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.066971] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfef0176-a08f-43fb-a080-619ff6261e8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.108077] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c615aae-aa30-46e8-b034-e013a7441179 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.118979] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166579, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.123939] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075b4de9-d0a7-4dcd-9759-2ac1e538a085 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.138797] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.221088] env[68569]: DEBUG oslo_concurrency.lockutils [None req-715ffd54-fff0-44e4-8e7c-15ba96cd249c tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.627s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.466733] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067245} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.469567] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.472428] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cf1d43-cc79-4866-a83c-f8e1efa3bd60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.503176] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] c9264123-ab19-40d5-959a-791b8966d2f6/c9264123-ab19-40d5-959a-791b8966d2f6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.506218] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e70887d-d434-4f69-be59-cedc59293acf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.527845] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 643.527845] env[68569]: value = "task-3166581" [ 643.527845] env[68569]: _type = "Task" [ 643.527845] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.538038] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166581, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.616906] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166579, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.7769} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.617389] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.617759] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.618145] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76ea0c17-d154-49a3-ab16-10414a7f3446 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.625654] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 643.625654] env[68569]: value = "task-3166582" [ 643.625654] env[68569]: _type = "Task" [ 643.625654] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.636560] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166582, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.677422] env[68569]: ERROR nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [req-acf91a6b-ecf3-4eb7-aa00-75b687e736cc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-acf91a6b-ecf3-4eb7-aa00-75b687e736cc"}]} [ 643.710255] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 643.724409] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.731144] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 643.731361] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.755159] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 643.788472] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 644.043351] env[68569]: DEBUG oslo_vmware.api [None 
req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.141028] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166582, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.241177} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.142251] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.143787] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10195d9-25a1-4a43-92bf-714e0d36d1d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.172591] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.177617] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2559e989-37f3-408b-abd6-0615a4f2e6ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.199273] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1793bef-7e0d-4734-b98b-883b2a48d30f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.203532] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 644.203532] env[68569]: value = "task-3166583" [ 644.203532] env[68569]: _type = "Task" [ 644.203532] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.209420] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Suspending the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 644.212813] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-bbf32c77-e861-4a19-8e6b-c0b08b9aff67 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.219341] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166583, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.227802] env[68569]: DEBUG oslo_vmware.api [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] Waiting for the task: (returnval){ [ 644.227802] env[68569]: value = "task-3166584" [ 644.227802] env[68569]: _type = "Task" [ 644.227802] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.241070] env[68569]: DEBUG oslo_vmware.api [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] Task: {'id': task-3166584, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.271269] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.277048] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.277393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.466458] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad75c64-af36-41a5-8e5b-a58414f9a822 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.481023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c89d090-d81d-4ef2-9b2c-149bd04f9247 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.536329] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670d205e-d20a-46de-92e6-4aa11955f32f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.552952] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166581, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.554455] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7517b2d4-d5fe-4726-9a43-fe0a6cf60fb6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.572095] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 644.718229] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166583, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.738747] env[68569]: DEBUG oslo_vmware.api [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] Task: {'id': task-3166584, 'name': SuspendVM_Task} progress is 50%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.047246] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166581, 'name': ReconfigVM_Task, 'duration_secs': 1.070526} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.047585] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Reconfigured VM instance instance-00000014 to attach disk [datastore2] c9264123-ab19-40d5-959a-791b8966d2f6/c9264123-ab19-40d5-959a-791b8966d2f6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.049150] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-896820a5-8632-4e0a-bfb2-3c1954c46f20 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.062936] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 645.062936] env[68569]: value = "task-3166585" [ 645.062936] env[68569]: _type = "Task" [ 645.062936] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.071861] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166585, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.098872] env[68569]: ERROR nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] [req-e552c86b-a91f-48c7-9983-77e47586a404] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e552c86b-a91f-48c7-9983-77e47586a404"}]} [ 645.120305] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 645.144761] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 645.145204] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 645.161721] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 
tempest-ServerDiagnosticsV248Test-1254147053-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 645.183684] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 645.221992] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166583, 'name': ReconfigVM_Task, 'duration_secs': 0.854155} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.227019] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.228026] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1d5cde4-77c5-4bb9-8b0e-d3c34c3726a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.242170] env[68569]: DEBUG oslo_vmware.api [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] Task: {'id': task-3166584, 'name': SuspendVM_Task, 'duration_secs': 0.958771} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.243691] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Suspended the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 645.243874] env[68569]: DEBUG nova.compute.manager [None req-8acafa00-d6a0-4026-b71a-b7d6871460e0 tempest-ServersAdminNegativeTestJSON-37703296 tempest-ServersAdminNegativeTestJSON-37703296-project-admin] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 645.244333] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 645.244333] env[68569]: value = "task-3166586" [ 645.244333] env[68569]: _type = "Task" [ 645.244333] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.245084] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d820c593-3af4-4fc0-b685-d93182c418da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.275271] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166586, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.577018] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166585, 'name': Rename_Task, 'duration_secs': 0.197537} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.579869] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.580352] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-433e8d44-bf20-49f4-8694-4ea514844cae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.590444] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 645.590444] env[68569]: value = "task-3166587" [ 645.590444] env[68569]: _type = "Task" [ 645.590444] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.602038] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166587, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.726530] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9632f799-5722-4d94-92a1-e278c98a32e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.735267] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd41362-81e2-4f55-877b-a41ab206c239 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.771318] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34873d73-ae0b-4c17-86d5-b2d559f5e9ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.785767] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166586, 'name': Rename_Task, 'duration_secs': 0.160947} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.786888] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.788169] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4269afd7-b0ce-4a15-8c1e-bdd448b831c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.794116] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f60478e-047e-470d-ab60-bffdbdd69e42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.810775] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 645.813130] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 645.813130] env[68569]: value = "task-3166588" [ 645.813130] env[68569]: _type = "Task" [ 645.813130] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.823092] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166588, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.103414] env[68569]: DEBUG oslo_vmware.api [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166587, 'name': PowerOnVM_Task, 'duration_secs': 0.460267} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.104178] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.104268] env[68569]: INFO nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Took 8.93 seconds to spawn the instance on the hypervisor. [ 646.104441] env[68569]: DEBUG nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.105575] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728d2d17-9198-42f0-9d00-97b524c9aaf7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.330438] env[68569]: DEBUG oslo_vmware.api [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166588, 'name': PowerOnVM_Task, 'duration_secs': 0.450935} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.331365] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.331365] env[68569]: INFO nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Took 6.51 seconds to spawn the instance on the hypervisor. 
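The records above show the driver submitting Rename_Task and PowerOnVM_Task operations and then polling each task until it reports completion ("progress is N%" lines, followed by "completed successfully" with a duration_secs). The snippet below is a minimal, hypothetical illustration of that poll-until-done pattern only; it does not use the real oslo.vmware API, and the get_task_state callable and TaskTimeout exception are assumptions introduced purely for the sketch.

    import time

    class TaskTimeout(Exception):
        """Raised when a task does not reach a terminal state in time."""

    def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
        # get_task_state() is assumed to return (state, progress, error),
        # where state is one of 'queued', 'running', 'success', 'error'.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = get_task_state()
            # Mirrors the "Task: {...} progress is N%" debug lines above.
            print("task progress is %s%%" % progress)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed: %s" % error)
            time.sleep(poll_interval)
        raise TaskTimeout("task did not complete within %.0fs" % timeout)

In the log this loop runs once per vCenter task (SuspendVM_Task, ReconfigVM_Task, PowerOnVM_Task, and so on), which is why several independent "Waiting for the task" / "progress is N%" sequences are interleaved.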
[ 646.331365] env[68569]: DEBUG nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.333207] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465a91cd-bceb-438b-9370-5f8788478f51 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.384627] env[68569]: DEBUG nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 45 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 646.385192] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 45 to 46 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 646.385441] env[68569]: DEBUG nova.compute.provider_tree [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 646.633136] env[68569]: INFO nova.compute.manager [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Took 33.12 seconds to build instance. [ 646.858781] env[68569]: INFO nova.compute.manager [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Took 31.66 seconds to build instance. 
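Earlier in this stream the inventory update for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 first failed with a 409 placement.concurrent_update ("resource provider generation conflict"), after which the report client refreshed its cached inventories, aggregates, and traits and retried, moving the provider generation from 45 to 46. The sketch below is a simplified, hypothetical illustration of that optimistic-concurrency retry loop against a Placement-style API; get_provider and put_inventory are assumed helpers, not the nova.scheduler.client.report implementation.

    # Minimal sketch of a generation-based compare-and-swap retry against a
    # Placement-style inventory endpoint. get_provider() and put_inventory()
    # are assumed wrappers around the HTTP calls; they are not part of nova.
    def set_inventory_with_retry(get_provider, put_inventory, provider_uuid,
                                 inventory, max_attempts=4):
        for attempt in range(max_attempts):
            # Re-read the provider to pick up the current generation.
            generation = get_provider(provider_uuid)["generation"]
            status, body = put_inventory(provider_uuid, generation, inventory)
            if status == 200:
                # Server accepted the update and incremented the generation.
                return body["resource_provider_generation"]
            if status == 409:
                # Another writer updated the provider first; refresh and
                # retry, as the log shows after placement.concurrent_update.
                continue
            raise RuntimeError("unexpected status %s from placement" % status)
        raise RuntimeError("gave up after %d generation conflicts" % max_attempts)

The retry succeeds on the second pass in the log, which is why the later "Updated inventory ... with generation 45" and "Updating resource provider ... generation from 45 to 46" records appear without a further error.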
[ 646.891318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.880s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.894816] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.441s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.899028] env[68569]: INFO nova.compute.claims [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.925833] env[68569]: INFO nova.scheduler.client.report [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Deleted allocations for instance 7693ef68-d7e5-4899-9615-9f2a1dd0bce8 [ 647.136775] env[68569]: DEBUG oslo_concurrency.lockutils [None req-db448923-e1d8-4ce0-bf7a-03fb2a98e983 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c9264123-ab19-40d5-959a-791b8966d2f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.297s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.363048] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eaada2c-8e48-43a4-8fc4-5875e5893409 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.658s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.435242] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e273442-57ce-4efb-beeb-7d861c770c24 tempest-ServerDiagnosticsV248Test-1254147053 tempest-ServerDiagnosticsV248Test-1254147053-project-member] Lock "7693ef68-d7e5-4899-9615-9f2a1dd0bce8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.433s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.643327] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 647.866450] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.178358] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.208855] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "de2b0206-0c73-4275-89ff-37199520dd71" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.209294] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.393161] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.499792] env[68569]: DEBUG nova.compute.manager [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.508827] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062a2c7a-5be6-4d9b-a3ff-b63eadee13d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.542371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d67b9c-f88f-4dfd-be7f-8f4355484241 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.555520] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afe1b21-ec6a-46e9-aae9-a75c690424bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.593184] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af4e2da-a1c1-4c2a-a7d1-f368fc8b81da 
{{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.608194] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f3f123-f1e1-4533-9cb2-2224809a2897 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.625453] env[68569]: DEBUG nova.compute.provider_tree [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.021159] env[68569]: INFO nova.compute.manager [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] instance snapshotting [ 649.027270] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80744a53-8552-4eef-aaa2-751dfdad39aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.050069] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21074ae3-c397-4bf3-bf51-493e967e1e11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.129735] env[68569]: DEBUG nova.scheduler.client.report [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.230360] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.230789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.480633] env[68569]: INFO nova.compute.manager [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 
9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Rebuilding instance [ 649.550847] env[68569]: DEBUG nova.compute.manager [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 649.551371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f32bc99-a59e-4f85-87c6-5178c069c04e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.565947] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 649.566444] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b59df425-13c3-4c39-a887-b7c6fe0826c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.576066] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 649.576066] env[68569]: value = "task-3166589" [ 649.576066] env[68569]: _type = "Task" [ 649.576066] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.588678] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166589, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.642984] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.642984] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 649.644271] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.709s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.645805] env[68569]: INFO nova.compute.claims [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.034531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "77b9756e-2299-47e2-a6d8-e8026e33a3de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.034741] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.086195] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166589, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.143799] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "0c4d4d93-89bf-4164-973b-af48278a3915" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.143799] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "0c4d4d93-89bf-4164-973b-af48278a3915" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.152432] env[68569]: DEBUG nova.compute.utils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 650.161029] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 650.161225] env[68569]: DEBUG nova.network.neutron [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.291160] env[68569]: DEBUG nova.policy [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715937edaba643339195f77bb00fe05d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '062b8ddb5f0d46d08425a66db32471be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 650.574645] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.574645] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a4cd4fd-efda-4dec-b8d9-df59e6a8d202 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.585923] env[68569]: 
DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 650.585923] env[68569]: value = "task-3166590" [ 650.585923] env[68569]: _type = "Task" [ 650.585923] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.589473] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166589, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.596754] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166590, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.657957] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 650.954618] env[68569]: DEBUG nova.network.neutron [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Successfully created port: 7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.094346] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166589, 'name': CreateSnapshot_Task, 'duration_secs': 1.02269} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.095658] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 651.096485] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b014d76-bbd1-4bb6-b0de-e300717dc492 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.104631] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166590, 'name': PowerOffVM_Task, 'duration_secs': 0.141464} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.105948] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.105948] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.106785] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95331ab3-e0a6-42a6-8e6d-69b6a3637359 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.126179] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.126179] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6c67e63-b222-4dbc-968a-c9e258ec8c2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.154306] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.154548] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.154730] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Deleting the datastore file [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.155930] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a120401-24cf-4713-9b1d-ef08dc4f06ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.168817] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 651.168817] env[68569]: value = "task-3166592" [ 651.168817] env[68569]: _type = "Task" [ 651.168817] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.182326] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.201472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "40b768c1-9007-4f78-a90f-61b2ac64553f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.201472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.201472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "40b768c1-9007-4f78-a90f-61b2ac64553f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.201472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.203073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.205350] env[68569]: INFO nova.compute.manager [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Terminating instance [ 651.284818] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfc9cff-1d58-4e62-b8ba-0225b5e9686e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.291065] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557c154a-5cf1-4989-8824-90be4e1c75de {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.326974] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad29ab82-5dc6-43a8-aa13-25934e2f03e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.332720] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e577a72c-0d60-4b92-bc58-4e448b0ade48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.347354] env[68569]: DEBUG nova.compute.provider_tree [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.571850] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.572119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.628618] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 651.629009] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c5b539b6-0246-4303-bbeb-a9a9d53547ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.640121] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 651.640121] env[68569]: value = "task-3166593" [ 651.640121] env[68569]: _type = "Task" [ 651.640121] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.647857] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166593, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.671661] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 651.682604] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094427} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.682938] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.683123] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.683330] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.714917] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 651.717109] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.717109] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 
tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.717109] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.717109] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.717109] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 651.717338] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 651.717338] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 651.717338] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 651.717338] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 651.717338] env[68569]: DEBUG nova.virt.hardware [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 651.718223] env[68569]: DEBUG nova.compute.manager [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 651.718500] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.720209] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33cf071-4cc9-4314-8ab3-7f7fe16eafcf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.723240] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67c48ae-4bc3-426a-b2bb-6eedc681577f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.733768] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02adb8ca-ac25-4913-8cf8-2a57ee789851 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.738406] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.739333] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc7e4d94-3bf2-491c-850e-7d72d8e55788 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.830080] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.830080] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.830080] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleting the datastore file [datastore2] 40b768c1-9007-4f78-a90f-61b2ac64553f {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.830261] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a078047-5313-4405-8dc1-22b86ea0e99f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.837808] env[68569]: DEBUG oslo_vmware.api [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 651.837808] 
env[68569]: value = "task-3166595" [ 651.837808] env[68569]: _type = "Task" [ 651.837808] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.848812] env[68569]: DEBUG oslo_vmware.api [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.853018] env[68569]: DEBUG nova.scheduler.client.report [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.155431] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166593, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.255898] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "16b6fafe-524d-482f-961b-10e3601ac4c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.256161] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.347988] env[68569]: DEBUG oslo_vmware.api [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164994} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.348369] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.348586] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.348786] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.348960] env[68569]: INFO nova.compute.manager [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Took 0.63 seconds to destroy the instance on the hypervisor. [ 652.349269] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.349531] env[68569]: DEBUG nova.compute.manager [-] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 652.349624] env[68569]: DEBUG nova.network.neutron [-] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.357690] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.359413] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.362532] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.010s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.364259] env[68569]: INFO nova.compute.claims [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.652554] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166593, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.724566] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.724566] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.724566] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.725012] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.725012] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.725115] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 
tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.726594] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.726691] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.727287] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.727704] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.727903] env[68569]: DEBUG nova.virt.hardware [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.728911] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdba8bae-9c41-4db2-85ae-404fc710141c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.740038] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc25ea80-20b1-484f-a29c-1bdd8ada11c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.759750] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.765415] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.765758] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.765942] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ad98f14-0440-4792-a344-af862008abb1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.784886] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.784886] env[68569]: value = "task-3166596" [ 652.784886] env[68569]: _type = "Task" [ 652.784886] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.796113] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166596, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.869447] env[68569]: DEBUG nova.compute.utils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 652.873237] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 652.875268] env[68569]: DEBUG nova.network.neutron [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 652.977377] env[68569]: DEBUG nova.policy [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 653.045229] env[68569]: DEBUG nova.compute.manager [req-8a184776-9779-47d8-8cc0-1b2fc361c5e0 req-e53486b4-f397-4406-b2cf-0035bb4ef41b service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Received event network-vif-plugged-7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 653.045466] env[68569]: DEBUG oslo_concurrency.lockutils [req-8a184776-9779-47d8-8cc0-1b2fc361c5e0 req-e53486b4-f397-4406-b2cf-0035bb4ef41b service nova] Acquiring lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.045687] env[68569]: DEBUG oslo_concurrency.lockutils [req-8a184776-9779-47d8-8cc0-1b2fc361c5e0 req-e53486b4-f397-4406-b2cf-0035bb4ef41b service nova] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.045853] env[68569]: DEBUG oslo_concurrency.lockutils [req-8a184776-9779-47d8-8cc0-1b2fc361c5e0 req-e53486b4-f397-4406-b2cf-0035bb4ef41b service nova] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.046763] env[68569]: DEBUG nova.compute.manager [req-8a184776-9779-47d8-8cc0-1b2fc361c5e0 req-e53486b4-f397-4406-b2cf-0035bb4ef41b service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] No waiting events found dispatching network-vif-plugged-7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 653.047063] env[68569]: WARNING nova.compute.manager [req-8a184776-9779-47d8-8cc0-1b2fc361c5e0 req-e53486b4-f397-4406-b2cf-0035bb4ef41b service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Received unexpected event network-vif-plugged-7ec66ae2-2713-4784-8a3e-bba562877a03 for instance with vm_state building and task_state spawning. [ 653.156825] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166593, 'name': CloneVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.159891] env[68569]: DEBUG nova.network.neutron [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Successfully updated port: 7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.214105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.214338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.296563] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166596, 'name': CreateVM_Task, 'duration_secs': 0.358419} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.296859] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.297180] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.297350] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.297664] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 653.297917] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-151384dd-4d7e-439f-ae0c-97714f46bd28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.305245] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 653.305245] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52141dc9-7d69-5d42-4e69-1d0d19b0839a" [ 653.305245] env[68569]: _type = "Task" [ 653.305245] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.312307] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52141dc9-7d69-5d42-4e69-1d0d19b0839a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.382243] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 653.392189] env[68569]: DEBUG nova.network.neutron [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Successfully created port: 43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.484028] env[68569]: DEBUG nova.network.neutron [-] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.657579] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166593, 'name': CloneVM_Task, 'duration_secs': 1.527846} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.657579] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Created linked-clone VM from snapshot [ 653.657579] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0599dc8-f431-470c-a58b-425d1b2e6c17 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.666448] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "refresh_cache-239c1217-ba8e-474a-b02c-7d85e3ac92f4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.666641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "refresh_cache-239c1217-ba8e-474a-b02c-7d85e3ac92f4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.666769] env[68569]: DEBUG nova.network.neutron [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.669532] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Uploading image c992fabc-ac8f-45c3-9598-140c068ec2ed {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 653.704076] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 653.704076] 
env[68569]: value = "vm-633496" [ 653.704076] env[68569]: _type = "VirtualMachine" [ 653.704076] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 653.704350] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-49c89933-d822-4956-bb2b-5a2f5e4db829 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.715088] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lease: (returnval){ [ 653.715088] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f2e60e-ecd2-4293-dc99-3c6ee83327ab" [ 653.715088] env[68569]: _type = "HttpNfcLease" [ 653.715088] env[68569]: } obtained for exporting VM: (result){ [ 653.715088] env[68569]: value = "vm-633496" [ 653.715088] env[68569]: _type = "VirtualMachine" [ 653.715088] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 653.715772] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the lease: (returnval){ [ 653.715772] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f2e60e-ecd2-4293-dc99-3c6ee83327ab" [ 653.715772] env[68569]: _type = "HttpNfcLease" [ 653.715772] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 653.723898] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 653.723898] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f2e60e-ecd2-4293-dc99-3c6ee83327ab" [ 653.723898] env[68569]: _type = "HttpNfcLease" [ 653.723898] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 653.819492] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52141dc9-7d69-5d42-4e69-1d0d19b0839a, 'name': SearchDatastore_Task, 'duration_secs': 0.010152} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.819794] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.820030] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.820639] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.820825] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.821037] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.821283] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9961bde-eaef-43d1-9b9c-ea6d9325d33f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.830555] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.830555] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.831808] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27e4a76d-60e5-41f6-82ec-6dc5443ba048 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.837404] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 653.837404] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5280d9ef-5871-e7cc-dbc4-8ba45fe268f1" [ 653.837404] env[68569]: _type = "Task" [ 653.837404] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.853177] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5280d9ef-5871-e7cc-dbc4-8ba45fe268f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009973} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.854259] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aba9eba-c8bc-46c1-a036-06a848cf00d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.859498] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 653.859498] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521a55c2-0f4f-d322-1a30-6e5d838be928" [ 653.859498] env[68569]: _type = "Task" [ 653.859498] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.871021] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521a55c2-0f4f-d322-1a30-6e5d838be928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.991393] env[68569]: INFO nova.compute.manager [-] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Took 1.64 seconds to deallocate network for instance. 
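[editor's note] The recurring triplets in this trace — "Invoking <Method>_Task with opID=...", then "Task: {'id': ..., 'name': ...} progress is N%.", then "... completed successfully." — all come from the same mechanism: the VMware driver invokes a vSphere method that returns a Task object (CloneVM_Task, CreateVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task above) and then oslo.vmware polls that task until it finishes (wait_for_task at oslo_vmware/api.py:397 and _poll_task at api.py:434 in the paths logged here). The sketch below is a minimal, self-contained approximation of that invoke-then-poll loop so the log lines are easier to read; the FakeTask class, the poll interval, and the exact message strings are illustrative assumptions, not oslo.vmware's actual implementation, which the real driver reaches via VMwareAPISession.wait_for_task().

```python
# Illustrative sketch only: a simplified version of the invoke-then-poll pattern
# behind the "progress is N%" / "completed successfully" lines in this log.
# The real code path is oslo_vmware.api.VMwareAPISession.wait_for_task(), which
# reads the vCenter task state for us; FakeTask and the 0.5s poll interval are
# assumptions so the example runs without a vCenter.

import itertools
import logging
import time

LOG = logging.getLogger("oslo_vmware.api")
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(name)s %(message)s")


class FakeTask:
    """Stand-in for a vCenter task reference such as task-3166593 (CloneVM_Task)."""

    def __init__(self, task_id, name, progress_steps):
        self.id = task_id
        self.name = name
        # After the scripted progress values, report 100% (done) forever.
        self._progress = itertools.chain(progress_steps, itertools.repeat(100))

    def poll(self):
        """Return (state, progress); a real client reads Task.info from vCenter."""
        progress = next(self._progress)
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it finishes, mirroring the _poll_task log messages."""
    while True:
        state, progress = task.poll()
        if state == "running":
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      task.id, task.name, progress)
            time.sleep(poll_interval)
            continue
        if state == "success":
            LOG.debug("Task: {'id': %s, 'name': %s} completed successfully.",
                      task.id, task.name)
            return
        raise RuntimeError("Task %s failed" % task.id)


if __name__ == "__main__":
    # Roughly reproduces the CloneVM_Task trace above: 0% -> 94% -> done.
    wait_for_task(FakeTask("task-3166593", "CloneVM_Task", [0, 94]))
```

The surrounding "Acquiring lock ... / Lock ... acquired / Lock ... released" lines are the other half of the pattern: oslo.concurrency's lockutils serializes these per-instance and per-image-cache operations around the task invocations, which is why each build, destroy, or image-cache fetch in this trace is bracketed by a matching acquire/release pair.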
[ 654.036743] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd01710-a3e4-42a5-bfbc-8bb993f4aa7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.045315] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606eae4e-df93-4c1a-9776-b99d6f9da16a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.076319] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e71e55b-4c5a-44cb-8eb2-a35dc3f7b1de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.084251] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5add2e0-bc38-492e-9197-93c467307010 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.097324] env[68569]: DEBUG nova.compute.provider_tree [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.222425] env[68569]: DEBUG nova.network.neutron [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.228214] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 654.228214] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f2e60e-ecd2-4293-dc99-3c6ee83327ab" [ 654.228214] env[68569]: _type = "HttpNfcLease" [ 654.228214] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 654.228569] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 654.228569] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f2e60e-ecd2-4293-dc99-3c6ee83327ab" [ 654.228569] env[68569]: _type = "HttpNfcLease" [ 654.228569] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 654.229314] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8585e8-03c3-4d16-8aea-d059c5858fe9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.237786] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527dc3a4-97a1-b3c2-8545-4b1b92cbf9e4/disk-0.vmdk from lease info. 
{{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 654.238008] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527dc3a4-97a1-b3c2-8545-4b1b92cbf9e4/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 654.336077] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-890f938e-8751-4e21-9dbf-8d2ff272e4f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.382386] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521a55c2-0f4f-d322-1a30-6e5d838be928, 'name': SearchDatastore_Task, 'duration_secs': 0.011387} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.385139] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.386312] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.386312] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d0947f4-58b7-4191-aa06-7b673198f1dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.392861] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 654.392861] env[68569]: value = "task-3166598" [ 654.392861] env[68569]: _type = "Task" [ 654.392861] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.397716] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 654.402151] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.434487] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 654.434487] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.434487] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 654.434686] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.434686] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 654.434686] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 654.434686] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 654.434686] env[68569]: DEBUG 
nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 654.434856] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 654.434891] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 654.439036] env[68569]: DEBUG nova.virt.hardware [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 654.439036] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537baf05-db90-429c-8d81-f98871c356f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.447096] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e8814c-b0ff-4f12-9298-5b687d510020 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.500934] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.506156] env[68569]: DEBUG nova.network.neutron [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Updating instance_info_cache with network_info: [{"id": "7ec66ae2-2713-4784-8a3e-bba562877a03", "address": "fa:16:3e:bd:42:bb", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap7ec66ae2-27", "ovs_interfaceid": "7ec66ae2-2713-4784-8a3e-bba562877a03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.603646] env[68569]: DEBUG nova.scheduler.client.report [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.909271] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50784} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.909735] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 654.910038] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 654.910351] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38fc4c26-fac9-4bf4-9856-ea0b736efbe6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.918011] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 654.918011] env[68569]: value = "task-3166599" [ 654.918011] env[68569]: _type = "Task" [ 654.918011] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.926791] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166599, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.013329] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "refresh_cache-239c1217-ba8e-474a-b02c-7d85e3ac92f4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.013329] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Instance network_info: |[{"id": "7ec66ae2-2713-4784-8a3e-bba562877a03", "address": "fa:16:3e:bd:42:bb", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ec66ae2-27", "ovs_interfaceid": "7ec66ae2-2713-4784-8a3e-bba562877a03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 655.013553] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:42:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f52a458-d157-48a3-b4e2-b8cc0779afe2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ec66ae2-2713-4784-8a3e-bba562877a03', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.030083] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Creating folder: Project (062b8ddb5f0d46d08425a66db32471be). Parent ref: group-v633430. 
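The instance_info_cache updates above carry the full Neutron network_info for port 7ec66ae2-2713-4784-8a3e-bba562877a03: a list of VIF entries, each with the MAC address, the OVS binding details, and nested subnets holding the fixed IPs, which the VMware driver then turns into the VIF info list (vmxnet3 on the br-int opaque network). A minimal sketch of walking that structure in plain Python; the sample data is trimmed from the trace and the helper is illustrative, not Nova code:

    # Walk a Neutron network_info structure shaped like the cached entries
    # in this log (id / address / network.subnets[].ips[] fields).
    # Sample data trimmed from the trace; the helper is illustrative only.
    network_info = [{
        "id": "7ec66ae2-2713-4784-8a3e-bba562877a03",
        "address": "fa:16:3e:bd:42:bb",
        "type": "ovs",
        "devname": "tap7ec66ae2-27",
        "network": {
            "id": "7738c508-0306-4079-a892-0554c562ce54",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
                "ips": [{"address": "192.168.128.6", "type": "fixed",
                         "version": 4, "floating_ips": []}],
            }],
        },
    }]

    def fixed_addresses(vifs):
        """Yield (port_id, mac, fixed_ip) for every fixed IP in a network_info list."""
        for vif in vifs:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        yield vif["id"], vif["address"], ip["address"]

    for port_id, mac, addr in fixed_addresses(network_info):
        print(port_id, mac, addr)  # 7ec66ae2-... fa:16:3e:bd:42:bb 192.168.128.6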
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.030576] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ddea869-792f-4c20-ade6-63347a256ea9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.043456] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Created folder: Project (062b8ddb5f0d46d08425a66db32471be) in parent group-v633430. [ 655.044084] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Creating folder: Instances. Parent ref: group-v633498. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.044176] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64515f18-3da0-4890-bbb0-e74baf8d73e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.055558] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Created folder: Instances in parent group-v633498. [ 655.058189] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.058518] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.058844] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b36f517d-1d06-4d95-bec3-db94e2386c5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.085060] env[68569]: DEBUG nova.network.neutron [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Successfully updated port: 43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 655.094115] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.094115] env[68569]: value = "task-3166602" [ 655.094115] env[68569]: _type = "Task" [ 655.094115] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.110564] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "398dd3c7-c630-4a29-b204-80f6fb394ce8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.110649] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.110817] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166602, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.111607] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.749s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.112350] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 655.115768] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.040s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.117279] env[68569]: INFO nova.compute.claims [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.282361] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Received event network-vif-deleted-ce8c8a30-9a8b-4015-a5db-09a460d5765b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 655.284150] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Received event network-changed-7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 655.284150] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Refreshing instance network info cache due to event network-changed-7ec66ae2-2713-4784-8a3e-bba562877a03. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 655.284681] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Acquiring lock "refresh_cache-239c1217-ba8e-474a-b02c-7d85e3ac92f4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.284681] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Acquired lock "refresh_cache-239c1217-ba8e-474a-b02c-7d85e3ac92f4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.285479] env[68569]: DEBUG nova.network.neutron [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Refreshing network info cache for port 7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.431806] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136261} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.432284] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.433124] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19967b44-48c7-4cfa-aca7-d50ada20ba37 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.455960] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.456652] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81ddc3da-1ca8-40f7-a21e-ccf1a807d5be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.478116] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 655.478116] env[68569]: value = "task-3166603" [ 655.478116] env[68569]: _type = "Task" [ 655.478116] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.487167] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166603, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.590513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-303f41c0-7a19-48b2-a072-4f138f6f8156" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.590673] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-303f41c0-7a19-48b2-a072-4f138f6f8156" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.590829] env[68569]: DEBUG nova.network.neutron [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.608389] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "b40c9dec-cebc-4d23-8df4-96e804333706" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.608670] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b40c9dec-cebc-4d23-8df4-96e804333706" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.617192] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166602, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.621323] env[68569]: DEBUG nova.compute.utils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 655.627343] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 655.627977] env[68569]: DEBUG nova.network.neutron [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.672942] env[68569]: DEBUG nova.policy [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f53c5668eb3c431592f00aab742d14d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '128ddc15398c4aadb4212393c51670dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 655.993580] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166603, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.106805] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166602, 'name': CreateVM_Task, 'duration_secs': 0.702439} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.107376] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.108606] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.108786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.109298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.109566] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86573717-4d22-4e7f-8a5f-0fc1d8bd27cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
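Nearly every oslo_vmware record in this trace follows one pattern: the driver invokes an asynchronous vSphere operation (Folder.CreateVM_Task, VirtualDiskManager.CopyVirtualDisk_Task, HostDatastoreBrowser.SearchDatastore_Task, ...), gets back a task reference, and wait_for_task then polls it, which is what produces the repeated "Waiting for the task" / "progress is N%" / "completed successfully" records. A minimal sketch of that pattern against oslo.vmware; the host, credentials and vm_ref are placeholders, and the constructor argument names are quoted from memory, so verify them against the installed library version:

    # Sketch of the invoke-then-wait pattern behind the
    # "Waiting for the task ... progress is N%" records in this log.
    # Host, credentials and vm_ref are placeholders, not values from the trace.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder user
        'password',                       # placeholder password
        api_retry_count=10,               # argument names assumed; check them
        task_poll_interval=0.5)           # against your oslo.vmware version

    vm_ref = ...  # a VirtualMachine managed object reference obtained elsewhere

    # Start an asynchronous vCenter task, then block until it completes;
    # wait_for_task polls the task and raises if it finishes in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)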
656.115113] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 656.115113] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522b09a6-eae9-4a5f-42bd-b99c576792e3" [ 656.115113] env[68569]: _type = "Task" [ 656.115113] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.125805] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522b09a6-eae9-4a5f-42bd-b99c576792e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.131417] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.172848] env[68569]: DEBUG nova.network.neutron [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.177997] env[68569]: DEBUG nova.network.neutron [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Updated VIF entry in instance network info cache for port 7ec66ae2-2713-4784-8a3e-bba562877a03. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 656.178401] env[68569]: DEBUG nova.network.neutron [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Updating instance_info_cache with network_info: [{"id": "7ec66ae2-2713-4784-8a3e-bba562877a03", "address": "fa:16:3e:bd:42:bb", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ec66ae2-27", "ovs_interfaceid": "7ec66ae2-2713-4784-8a3e-bba562877a03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.183913] env[68569]: DEBUG nova.network.neutron [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Successfully created port: 192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.427674] env[68569]: DEBUG nova.network.neutron [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Updating instance_info_cache with network_info: [{"id": "43722f3d-a2c3-44a7-9b31-b2477b2d126e", "address": "fa:16:3e:3d:ee:1d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43722f3d-a2", "ovs_interfaceid": "43722f3d-a2c3-44a7-9b31-b2477b2d126e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 656.496428] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166603, 'name': ReconfigVM_Task, 'duration_secs': 0.690472} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.502016] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c/9bb06e82-cc5c-4673-b1f6-aae87568aa9c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.502016] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b61dce3e-4202-4379-8f6f-298199f2647f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.510146] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 656.510146] env[68569]: value = "task-3166604" [ 656.510146] env[68569]: _type = "Task" [ 656.510146] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.524509] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166604, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.634743] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522b09a6-eae9-4a5f-42bd-b99c576792e3, 'name': SearchDatastore_Task, 'duration_secs': 0.011184} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.635220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.635472] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.635779] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.635936] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.636185] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.636624] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f80022bb-c710-4309-b503-694950bbc544 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.654393] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.654592] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.655705] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6853f07d-db01-4392-a92e-430594ac0572 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.666503] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 656.666503] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52504c07-2d56-16dd-dd0d-cff8e217f086" [ 656.666503] env[68569]: _type = "Task" [ 656.666503] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.678554] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52504c07-2d56-16dd-dd0d-cff8e217f086, 'name': SearchDatastore_Task, 'duration_secs': 0.009901} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.683367] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Releasing lock "refresh_cache-239c1217-ba8e-474a-b02c-7d85e3ac92f4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.683549] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Received event network-vif-plugged-43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 656.683738] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Acquiring lock "303f41c0-7a19-48b2-a072-4f138f6f8156-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.683942] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.685896] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.685896] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] No waiting events found dispatching 
network-vif-plugged-43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.685896] env[68569]: WARNING nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Received unexpected event network-vif-plugged-43722f3d-a2c3-44a7-9b31-b2477b2d126e for instance with vm_state building and task_state spawning. [ 656.685896] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Received event network-changed-43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 656.685896] env[68569]: DEBUG nova.compute.manager [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Refreshing instance network info cache due to event network-changed-43722f3d-a2c3-44a7-9b31-b2477b2d126e. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 656.686108] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Acquiring lock "refresh_cache-303f41c0-7a19-48b2-a072-4f138f6f8156" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.686108] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae4d233f-d8db-40a8-9a07-44c552394a29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.693290] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 656.693290] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fa8379-16c7-67d2-9f4c-85e91d233c78" [ 656.693290] env[68569]: _type = "Task" [ 656.693290] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.701974] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fa8379-16c7-67d2-9f4c-85e91d233c78, 'name': SearchDatastore_Task} progress is 0%. 
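The Acquiring / acquired / "released" records for locks such as "compute_resources", "refresh_cache-<uuid>" and the per-instance "-events" lock come from oslo.concurrency's lockutils, which logs the wait and hold times shown here (for example the 23.040s wait on "compute_resources" above). A minimal sketch of the two usual forms, with lock names mirroring the trace and illustrative stand-in bodies rather than Nova code:

    # The oslo.concurrency locking that emits the
    # "Acquiring lock ... / Lock ... acquired ... waited Ns / held Ns" records.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Serialized with anything else holding "compute_resources",
        # e.g. the resource tracker's instance_claim seen in this trace.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form with a per-instance lock name, as used for
        # the "refresh_cache-<uuid>" locks above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass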
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.793253] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec57329-3ef1-4299-8f5a-77ffa31dc0eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.801238] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf022a45-e890-49f2-9324-ca3a79ef098f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.835019] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0e92e8-0514-4249-982e-c17bec7bed13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.842960] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73436791-7bab-4ecb-8fa1-bb28650b8a29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.857639] env[68569]: DEBUG nova.compute.provider_tree [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.936030] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-303f41c0-7a19-48b2-a072-4f138f6f8156" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.936030] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Instance network_info: |[{"id": "43722f3d-a2c3-44a7-9b31-b2477b2d126e", "address": "fa:16:3e:3d:ee:1d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43722f3d-a2", "ovs_interfaceid": "43722f3d-a2c3-44a7-9b31-b2477b2d126e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.936204] env[68569]: DEBUG 
oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Acquired lock "refresh_cache-303f41c0-7a19-48b2-a072-4f138f6f8156" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.936204] env[68569]: DEBUG nova.network.neutron [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Refreshing network info cache for port 43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.937261] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:ee:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43722f3d-a2c3-44a7-9b31-b2477b2d126e', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.944914] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating folder: Project (decd2576711b41bbb25300d9db62643e). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.948345] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c4ede4a-a461-4a0a-9e14-7f8d0cab9607 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.960275] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created folder: Project (decd2576711b41bbb25300d9db62643e) in parent group-v633430. [ 656.961038] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating folder: Instances. Parent ref: group-v633501. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.961038] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8a220d1-7963-4f66-9a1f-a6302ae05267 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.969455] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created folder: Instances in parent group-v633501. [ 656.971165] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.971165] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.971165] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89b5b8d5-76a8-4863-95ad-0cbbb86becd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.991956] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.991956] env[68569]: value = "task-3166607" [ 656.991956] env[68569]: _type = "Task" [ 656.991956] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.999825] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166607, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.022228] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166604, 'name': Rename_Task, 'duration_secs': 0.221893} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.022517] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.022760] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55c989d7-8ebc-4efc-93f5-34bdd5354277 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.029673] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 657.029673] env[68569]: value = "task-3166608" [ 657.029673] env[68569]: _type = "Task" [ 657.029673] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.038085] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.149766] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 657.167847] env[68569]: DEBUG nova.network.neutron [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Updated VIF entry in instance network info cache for port 43722f3d-a2c3-44a7-9b31-b2477b2d126e. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.168206] env[68569]: DEBUG nova.network.neutron [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Updating instance_info_cache with network_info: [{"id": "43722f3d-a2c3-44a7-9b31-b2477b2d126e", "address": "fa:16:3e:3d:ee:1d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43722f3d-a2", "ovs_interfaceid": "43722f3d-a2c3-44a7-9b31-b2477b2d126e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.171600] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.171814] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.171965] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 
tempest-ServersTestManualDisk-523803947-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.172155] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.172316] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.172455] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.172658] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.172812] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.172971] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.173146] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.173314] env[68569]: DEBUG nova.virt.hardware [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.174142] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5cca25-82b1-491f-b639-2a0ecf2bac74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.182309] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5eb727f-de6c-4812-9270-a104735039e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.203611] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 
tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fa8379-16c7-67d2-9f4c-85e91d233c78, 'name': SearchDatastore_Task, 'duration_secs': 0.011083} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.203848] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.204119] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 239c1217-ba8e-474a-b02c-7d85e3ac92f4/239c1217-ba8e-474a-b02c-7d85e3ac92f4.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.204369] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75ae260c-92aa-401f-b57d-a78f075869d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.212626] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 657.212626] env[68569]: value = "task-3166609" [ 657.212626] env[68569]: _type = "Task" [ 657.212626] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.220205] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.361470] env[68569]: DEBUG nova.scheduler.client.report [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.503691] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166607, 'name': CreateVM_Task} progress is 25%. 
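The report client keeps logging "Inventory has not changed" together with the compute node's full Placement inventory. The usable capacity Placement derives from such a record is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; checking that against the figures repeated in this trace (plain Python, not Placement code):

    # Effective capacity implied by the inventory logged for provider
    # a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6:
    #   capacity = (total - reserved) * allocation_ratio; max_unit caps one request.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 128,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, largest single allocation={inv['max_unit']}")
    # VCPU: capacity=192, largest single allocation=16
    # MEMORY_MB: capacity=196078, largest single allocation=65530
    # DISK_GB: capacity=400, largest single allocation=128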
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.539960] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.671448] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfbd1774-ec68-4964-bae3-13d9e4093cde req-8ce8dc56-0541-498f-9828-fdb16e8202fb service nova] Releasing lock "refresh_cache-303f41c0-7a19-48b2-a072-4f138f6f8156" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.725505] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465533} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.725791] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 239c1217-ba8e-474a-b02c-7d85e3ac92f4/239c1217-ba8e-474a-b02c-7d85e3ac92f4.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 657.726078] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 657.726402] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87580453-259d-4ec0-99b6-8cefcc5b5083 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.733373] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 657.733373] env[68569]: value = "task-3166610" [ 657.733373] env[68569]: _type = "Task" [ 657.733373] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.741838] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166610, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.778636] env[68569]: DEBUG nova.compute.manager [req-8a56637c-31aa-4a9c-a9a1-4d2294b9b857 req-fe467ac0-bf12-4345-9f23-9c44480ccedd service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Received event network-vif-plugged-192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 657.778636] env[68569]: DEBUG oslo_concurrency.lockutils [req-8a56637c-31aa-4a9c-a9a1-4d2294b9b857 req-fe467ac0-bf12-4345-9f23-9c44480ccedd service nova] Acquiring lock "b2e6de60-b4e5-4030-bca7-355d17fec06d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.778636] env[68569]: DEBUG oslo_concurrency.lockutils [req-8a56637c-31aa-4a9c-a9a1-4d2294b9b857 req-fe467ac0-bf12-4345-9f23-9c44480ccedd service nova] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.779160] env[68569]: DEBUG oslo_concurrency.lockutils [req-8a56637c-31aa-4a9c-a9a1-4d2294b9b857 req-fe467ac0-bf12-4345-9f23-9c44480ccedd service nova] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.779837] env[68569]: DEBUG nova.compute.manager [req-8a56637c-31aa-4a9c-a9a1-4d2294b9b857 req-fe467ac0-bf12-4345-9f23-9c44480ccedd service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] No waiting events found dispatching network-vif-plugged-192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 657.780687] env[68569]: WARNING nova.compute.manager [req-8a56637c-31aa-4a9c-a9a1-4d2294b9b857 req-fe467ac0-bf12-4345-9f23-9c44480ccedd service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Received unexpected event network-vif-plugged-192a6986-c831-42da-bce9-c4bde617262e for instance with vm_state building and task_state spawning. [ 657.866253] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.867180] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 657.872924] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.489s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.872924] env[68569]: DEBUG nova.objects.instance [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lazy-loading 'resources' on Instance uuid 26625edb-06ca-48cc-aaf1-3c55a6ea942b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 657.927788] env[68569]: DEBUG nova.network.neutron [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Successfully updated port: 192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.004775] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166607, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.041493] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.243024] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069919} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.243382] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.244292] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433605f9-b799-4203-aa26-bce362bd4a2e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.268557] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 239c1217-ba8e-474a-b02c-7d85e3ac92f4/239c1217-ba8e-474a-b02c-7d85e3ac92f4.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.268984] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5f52084-94ad-4559-a842-c1928271da81 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.290900] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 658.290900] env[68569]: value = "task-3166611" [ 658.290900] env[68569]: _type = "Task" [ 658.290900] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.301306] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166611, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.374995] env[68569]: DEBUG nova.compute.utils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.379373] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.379627] env[68569]: DEBUG nova.network.neutron [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.430496] env[68569]: DEBUG nova.policy [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ead08530ca148d08b9c443c0c035949', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46b9b50afee8436b91fa14327ee5382a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.432293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.432293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquired lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.432293] env[68569]: DEBUG nova.network.neutron [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 658.504170] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166607, 'name': CreateVM_Task, 'duration_secs': 1.085888} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.504393] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.505058] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.508568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.508959] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 658.511781] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67eee272-3fbc-41b5-bd4b-eaa72128c143 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.517938] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 658.517938] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522faf03-88be-4bc7-32eb-d93976ef74a3" [ 658.517938] env[68569]: _type = "Task" [ 658.517938] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.526534] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522faf03-88be-4bc7-32eb-d93976ef74a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.539747] env[68569]: DEBUG oslo_vmware.api [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166608, 'name': PowerOnVM_Task, 'duration_secs': 1.277208} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.539876] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.540170] env[68569]: DEBUG nova.compute.manager [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.540967] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf37537-d533-440a-93c3-ce8bc05d7b0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.804139] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166611, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.819943] env[68569]: DEBUG nova.network.neutron [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Successfully created port: 441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.880206] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 658.943787] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aac2b9d-2da9-4c22-b6d6-882dfa7aedd3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.952035] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38f0e25-a1f2-40be-8440-7bb91a8079b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.992601] env[68569]: DEBUG nova.network.neutron [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.995046] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dce360-2786-477c-b559-d5eb48ad90cc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.004085] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6013f1-69cb-405d-93c4-773c10596543 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.019531] env[68569]: DEBUG nova.compute.provider_tree [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.034060] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522faf03-88be-4bc7-32eb-d93976ef74a3, 'name': SearchDatastore_Task, 'duration_secs': 0.023807} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.034396] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.034642] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.034883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.035058] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.035238] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.035537] env[68569]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b55a245f-7fdf-46f7-ba5b-db8f280f313b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.045403] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.045554] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 659.046317] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b563c92-8946-419f-bfec-3486083a1ebc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.053344] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 659.053344] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52efbf24-6a55-f660-af71-72a099fc83d8" [ 659.053344] env[68569]: _type = "Task" [ 659.053344] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.066156] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52efbf24-6a55-f660-af71-72a099fc83d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.066665] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.286231] env[68569]: DEBUG nova.network.neutron [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Updating instance_info_cache with network_info: [{"id": "192a6986-c831-42da-bce9-c4bde617262e", "address": "fa:16:3e:df:04:4a", "network": {"id": "0ce7bcdf-220a-4073-978c-559b6c03369d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1220421608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "128ddc15398c4aadb4212393c51670dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192a6986-c8", "ovs_interfaceid": "192a6986-c831-42da-bce9-c4bde617262e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.302546] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166611, 'name': ReconfigVM_Task, 'duration_secs': 0.991846} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.303552] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 239c1217-ba8e-474a-b02c-7d85e3ac92f4/239c1217-ba8e-474a-b02c-7d85e3ac92f4.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 659.304233] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-327f9217-5a9a-40b0-810e-2b41b0a00fb3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.311167] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 659.311167] env[68569]: value = "task-3166612" [ 659.311167] env[68569]: _type = "Task" [ 659.311167] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.322486] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166612, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.530029] env[68569]: DEBUG nova.scheduler.client.report [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.563859] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52efbf24-6a55-f660-af71-72a099fc83d8, 'name': SearchDatastore_Task, 'duration_secs': 0.012021} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.564612] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5e25c49-6340-4027-9cf2-df47f2265443 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.570993] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 659.570993] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5288e7ab-06fc-3859-02a3-33021ff548fd" [ 659.570993] env[68569]: _type = "Task" [ 659.570993] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.578713] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5288e7ab-06fc-3859-02a3-33021ff548fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.788721] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Releasing lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.789143] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Instance network_info: |[{"id": "192a6986-c831-42da-bce9-c4bde617262e", "address": "fa:16:3e:df:04:4a", "network": {"id": "0ce7bcdf-220a-4073-978c-559b6c03369d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1220421608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "128ddc15398c4aadb4212393c51670dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192a6986-c8", "ovs_interfaceid": "192a6986-c831-42da-bce9-c4bde617262e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 659.789591] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:df:04:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '12d8eedb-97cb-4d3b-b364-42d7fd8b3c85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '192a6986-c831-42da-bce9-c4bde617262e', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 659.797072] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Creating folder: Project (128ddc15398c4aadb4212393c51670dd). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.797704] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7300114a-0a88-4119-8daf-bed572b435f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.809463] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Created folder: Project (128ddc15398c4aadb4212393c51670dd) in parent group-v633430. [ 659.809589] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Creating folder: Instances. Parent ref: group-v633504. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.809924] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6da37e3f-af19-413c-9aab-2998547abcdb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.823880] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166612, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.825551] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Created folder: Instances in parent group-v633504. [ 659.825551] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.825551] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.825551] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-295e3c5a-d2fa-4adc-9692-439a2d98fd86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.845780] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.845780] env[68569]: value = "task-3166615" [ 659.845780] env[68569]: _type = "Task" [ 659.845780] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.853627] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166615, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.891519] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 659.914341] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 659.915024] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.915024] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 659.915024] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Flavor pref 0:0:0 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.915024] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 659.915282] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 659.915384] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 659.915549] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 659.915645] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 659.915804] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 659.915973] env[68569]: DEBUG nova.virt.hardware [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 659.916926] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b07b623-683f-4168-946d-afd6f47aa69c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.925660] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d6360c-de1e-4533-b75c-941ececa69f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.948502] env[68569]: DEBUG nova.compute.manager [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Received event network-changed-192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 659.948502] 
env[68569]: DEBUG nova.compute.manager [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Refreshing instance network info cache due to event network-changed-192a6986-c831-42da-bce9-c4bde617262e. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 659.948502] env[68569]: DEBUG oslo_concurrency.lockutils [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] Acquiring lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.948502] env[68569]: DEBUG oslo_concurrency.lockutils [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] Acquired lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.948502] env[68569]: DEBUG nova.network.neutron [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Refreshing network info cache for port 192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.034680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.037983] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.094s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.063687] env[68569]: INFO nova.scheduler.client.report [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Deleted allocations for instance 26625edb-06ca-48cc-aaf1-3c55a6ea942b [ 660.085497] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5288e7ab-06fc-3859-02a3-33021ff548fd, 'name': SearchDatastore_Task, 'duration_secs': 0.01106} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.085817] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.086117] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 303f41c0-7a19-48b2-a072-4f138f6f8156/303f41c0-7a19-48b2-a072-4f138f6f8156.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.086724] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e762eb5-cec4-4dde-9267-ded3b082bf7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.096902] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 660.096902] env[68569]: value = "task-3166616" [ 660.096902] env[68569]: _type = "Task" [ 660.096902] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.105465] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166616, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.243905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.244228] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.244451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.244743] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.244815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.247260] env[68569]: INFO nova.compute.manager [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Terminating instance [ 660.326224] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166612, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.361595] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166615, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.576445] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b4bbbb0d-d513-47de-95ec-94c1301462e6 tempest-ServerDiagnosticsNegativeTest-1070502688 tempest-ServerDiagnosticsNegativeTest-1070502688-project-member] Lock "26625edb-06ca-48cc-aaf1-3c55a6ea942b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.359s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.608826] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166616, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.625445] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43cb43e-727a-4296-b802-e012a08f70ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.631573] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebb0142-56dd-4ff0-ad6d-74d3388ffb44 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.665248] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291fb21e-3bef-45ff-95a4-7a1a1df70ddc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.673292] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e70f4d-7340-4927-be8d-dff08be49154 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.688433] env[68569]: DEBUG nova.compute.provider_tree [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.756711] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "refresh_cache-9bb06e82-cc5c-4673-b1f6-aae87568aa9c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.756959] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquired lock "refresh_cache-9bb06e82-cc5c-4673-b1f6-aae87568aa9c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.757117] env[68569]: DEBUG nova.network.neutron [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.818274] env[68569]: DEBUG 
nova.network.neutron [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Successfully updated port: 441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 660.831522] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166612, 'name': Rename_Task, 'duration_secs': 1.101178} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.832463] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 660.832463] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76b37f90-6feb-4b42-8ab9-ba8748cbe2d3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.841742] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 660.841742] env[68569]: value = "task-3166617" [ 660.841742] env[68569]: _type = "Task" [ 660.841742] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.846922] env[68569]: DEBUG nova.network.neutron [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Updated VIF entry in instance network info cache for port 192a6986-c831-42da-bce9-c4bde617262e. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 660.847299] env[68569]: DEBUG nova.network.neutron [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Updating instance_info_cache with network_info: [{"id": "192a6986-c831-42da-bce9-c4bde617262e", "address": "fa:16:3e:df:04:4a", "network": {"id": "0ce7bcdf-220a-4073-978c-559b6c03369d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1220421608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "128ddc15398c4aadb4212393c51670dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192a6986-c8", "ovs_interfaceid": "192a6986-c831-42da-bce9-c4bde617262e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.856981] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166617, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.861686] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166615, 'name': CreateVM_Task, 'duration_secs': 0.774052} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.861854] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 660.862930] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.863351] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.863470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 660.863735] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc2be19f-9c6f-49a6-85eb-4d55638fe552 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.870779] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 660.870779] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b1d53e-6397-481b-a3fa-2de718a08c31" [ 660.870779] env[68569]: _type = "Task" [ 660.870779] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.880013] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b1d53e-6397-481b-a3fa-2de718a08c31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.108465] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166616, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527369} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.108771] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 303f41c0-7a19-48b2-a072-4f138f6f8156/303f41c0-7a19-48b2-a072-4f138f6f8156.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.108992] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.109289] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90b38bf0-331e-4c89-85ec-08c8ee4f8b92 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.117733] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 661.117733] env[68569]: value = "task-3166618" [ 661.117733] env[68569]: _type = "Task" [ 661.117733] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.127568] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166618, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.191925] env[68569]: DEBUG nova.scheduler.client.report [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 661.280877] env[68569]: DEBUG nova.network.neutron [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.322373] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "refresh_cache-f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.322542] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquired lock "refresh_cache-f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.322692] env[68569]: DEBUG nova.network.neutron [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.352564] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166617, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.356229] env[68569]: DEBUG oslo_concurrency.lockutils [req-58231bc9-78d3-4afd-8369-ad165d90b5c7 req-85703e6f-f305-4bd2-b673-1199bdff7e83 service nova] Releasing lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.363309] env[68569]: DEBUG nova.network.neutron [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.381902] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b1d53e-6397-481b-a3fa-2de718a08c31, 'name': SearchDatastore_Task, 'duration_secs': 0.011107} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.382228] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.382461] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.382693] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.382836] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.383019] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.383291] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f58326b-06d8-4a44-87d6-2d6f1605ffd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.391786] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.392359] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 661.392729] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ea3c937-f408-4dcb-8981-8e8b14c4c90f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.398476] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 661.398476] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fac4dc-7378-e282-6e5a-eb6f8af775f1" [ 661.398476] env[68569]: _type = "Task" [ 661.398476] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.407447] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fac4dc-7378-e282-6e5a-eb6f8af775f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.627829] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076525} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.628131] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.628921] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829e4a23-190c-4bd2-bb24-8ed41f464fc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.651374] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 303f41c0-7a19-48b2-a072-4f138f6f8156/303f41c0-7a19-48b2-a072-4f138f6f8156.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.651374] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-640c85bb-7687-4642-b161-020d63971a7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.671066] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 661.671066] env[68569]: value = "task-3166619" [ 661.671066] env[68569]: _type = "Task" [ 661.671066] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.683437] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166619, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.698390] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.698761] env[68569]: INFO nova.compute.manager [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Successfully reverted task state from resize_migrating on failure for instance. [ 661.709801] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.738s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.711215] env[68569]: DEBUG nova.objects.instance [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lazy-loading 'resources' on Instance uuid 7129a57f-e639-49ae-96a9-3c1d966034a8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server [None req-c890b50e-8565-4f6a-a7c4-b5cc72500a05 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 661.719096] 
env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 661.719096] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 661.719510] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.719922] env[68569]: ERROR 
oslo_messaging.rpc.server raise self.value [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 661.719922] env[68569]: ERROR oslo_messaging.rpc.server [ 661.859022] env[68569]: DEBUG oslo_vmware.api [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166617, 'name': PowerOnVM_Task, 'duration_secs': 0.584423} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.859022] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.859022] env[68569]: INFO nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Took 10.18 seconds to spawn the instance on the hypervisor. [ 661.859022] env[68569]: DEBUG nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.859022] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a44356-057b-45f1-ba78-bb947219699e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.867875] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Releasing lock "refresh_cache-9bb06e82-cc5c-4673-b1f6-aae87568aa9c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.872023] env[68569]: DEBUG nova.compute.manager [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 661.872023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.872023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2faf367a-e0b2-461e-8af8-fd2a872dadb2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.878584] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.880479] env[68569]: DEBUG nova.network.neutron [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.882881] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff364b95-d3b7-426c-be91-5c3559fd2752 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.892063] env[68569]: DEBUG oslo_vmware.api [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 661.892063] env[68569]: value = "task-3166620" [ 661.892063] env[68569]: _type = "Task" [ 661.892063] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.902536] env[68569]: DEBUG oslo_vmware.api [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.912484] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fac4dc-7378-e282-6e5a-eb6f8af775f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011568} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.915692] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b769fd1a-d89f-48a0-a404-3d98f604e07a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.921376] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 661.921376] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52762996-fa8b-6575-9d84-e8ad8cb55f7d" [ 661.921376] env[68569]: _type = "Task" [ 661.921376] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.929783] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52762996-fa8b-6575-9d84-e8ad8cb55f7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.972628] env[68569]: DEBUG nova.compute.manager [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Received event network-vif-plugged-441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 661.972853] env[68569]: DEBUG oslo_concurrency.lockutils [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] Acquiring lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.973089] env[68569]: DEBUG oslo_concurrency.lockutils [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.973587] env[68569]: DEBUG oslo_concurrency.lockutils [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.973587] env[68569]: DEBUG nova.compute.manager [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] No waiting events found dispatching network-vif-plugged-441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 661.973587] env[68569]: WARNING nova.compute.manager [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Received unexpected event 
network-vif-plugged-441ff8db-edf3-4231-bf56-9a1c6b7b348b for instance with vm_state building and task_state spawning. [ 661.973748] env[68569]: DEBUG nova.compute.manager [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Received event network-changed-441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 661.973902] env[68569]: DEBUG nova.compute.manager [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Refreshing instance network info cache due to event network-changed-441ff8db-edf3-4231-bf56-9a1c6b7b348b. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 661.974074] env[68569]: DEBUG oslo_concurrency.lockutils [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] Acquiring lock "refresh_cache-f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.066959] env[68569]: DEBUG nova.network.neutron [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Updating instance_info_cache with network_info: [{"id": "441ff8db-edf3-4231-bf56-9a1c6b7b348b", "address": "fa:16:3e:c8:1f:a9", "network": {"id": "ea44cd90-58bc-431b-8988-ffd5f0ec9136", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1472118572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46b9b50afee8436b91fa14327ee5382a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap441ff8db-ed", "ovs_interfaceid": "441ff8db-edf3-4231-bf56-9a1c6b7b348b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.182514] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.381154] env[68569]: INFO nova.compute.manager [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Took 33.95 seconds to build instance. 
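The TypeError recorded at [ 661.719096] above is a plain positional-argument mismatch: the RPC-dispatched resize path hands the driver one more positional argument than VMwareVCDriver.migrate_disk_and_power_off() accepts. Below is a minimal, self-contained sketch of how that error shape arises; FakeDriver and its parameter names are hypothetical stand-ins, not Nova's actual driver code, and only the argument counts (6 to 9 accepted, 10 supplied once the implicit self is counted) mirror the traceback.

    # Illustrative sketch only -- not Nova code. Parameter names are made up;
    # only the counts match the traceback: self + 5 required + 3 optional
    # parameters means the method "takes from 6 to 9 positional arguments".
    class FakeDriver:
        def migrate_disk_and_power_off(self, context, instance, dest, flavor,
                                       network_info, block_device_info=None,
                                       timeout=0, retry_interval=0):
            return "disk_info"

    driver = FakeDriver()
    try:
        # A caller built against a wider interface passes one extra positional
        # argument -- 10 in total once the implicit ``self`` is counted ...
        driver.migrate_disk_and_power_off(
            "ctxt", "instance", "dest", "flavor", "nw_info",
            "bdm", 0, 0, "extra-positional-arg")
    except TypeError as exc:
        # ... which raises the same shape of error seen in the log:
        # "... takes from 6 to 9 positional arguments but 10 were given"
        print(exc)

Run under Python 3.10 this prints a message of the same form as the log entry ("FakeDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given"), which is what oslo_messaging.rpc.server records when the compute manager's call site and the loaded driver's method signature disagree -- usually a sign that the driver's override lags behind the base ComputeDriver interface it is meant to implement.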
[ 662.409782] env[68569]: DEBUG oslo_vmware.api [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166620, 'name': PowerOffVM_Task, 'duration_secs': 0.235818} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.414631] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 662.415096] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 662.416152] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ad7414c-84a2-4b35-b710-f896a75e740a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.432901] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52762996-fa8b-6575-9d84-e8ad8cb55f7d, 'name': SearchDatastore_Task, 'duration_secs': 0.011383} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.436148] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.436582] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] b2e6de60-b4e5-4030-bca7-355d17fec06d/b2e6de60-b4e5-4030-bca7-355d17fec06d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 662.437677] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9c727e2-ff16-4367-bfe3-12e700a5e40e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.441920] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 662.442232] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 
tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 662.442501] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Deleting the datastore file [datastore2] 9bb06e82-cc5c-4673-b1f6-aae87568aa9c {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 662.442890] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84f48b5d-ca48-4287-8f49-4252b654aef1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.448531] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 662.448531] env[68569]: value = "task-3166622" [ 662.448531] env[68569]: _type = "Task" [ 662.448531] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.456682] env[68569]: DEBUG oslo_vmware.api [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for the task: (returnval){ [ 662.456682] env[68569]: value = "task-3166623" [ 662.456682] env[68569]: _type = "Task" [ 662.456682] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.187739] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Releasing lock "refresh_cache-f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.188079] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Instance network_info: |[{"id": "441ff8db-edf3-4231-bf56-9a1c6b7b348b", "address": "fa:16:3e:c8:1f:a9", "network": {"id": "ea44cd90-58bc-431b-8988-ffd5f0ec9136", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1472118572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46b9b50afee8436b91fa14327ee5382a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap441ff8db-ed", "ovs_interfaceid": "441ff8db-edf3-4231-bf56-9a1c6b7b348b", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 663.188539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0938a4c-8ac0-4022-b4ed-b79988d5f187 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.775s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.188833] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.188905] env[68569]: WARNING oslo_vmware.common.loopingcall [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] task run outlasted interval by 0.23968699999999998 sec [ 663.189337] env[68569]: DEBUG oslo_concurrency.lockutils [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] Acquired lock "refresh_cache-f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.189507] env[68569]: DEBUG nova.network.neutron [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Refreshing network info cache for port 441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.190899] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:1f:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '594b35bb-a20b-4f0e-bd35-9acf9cc6bf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '441ff8db-edf3-4231-bf56-9a1c6b7b348b', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.197759] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Creating folder: Project (46b9b50afee8436b91fa14327ee5382a). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.203685] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57ab47cc-a315-4375-af30-d39e428bd146 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.217692] env[68569]: DEBUG oslo_vmware.api [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Task: {'id': task-3166623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118752} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.218783] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 663.219139] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 663.219412] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 663.223019] env[68569]: INFO nova.compute.manager [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Took 1.35 seconds to destroy the instance on the hypervisor. [ 663.223019] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.227380] env[68569]: DEBUG nova.compute.manager [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 663.227545] env[68569]: DEBUG nova.network.neutron [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.229643] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488892} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.230045] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166619, 'name': ReconfigVM_Task, 'duration_secs': 0.599168} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.231875] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] b2e6de60-b4e5-4030-bca7-355d17fec06d/b2e6de60-b4e5-4030-bca7-355d17fec06d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 663.231875] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 663.232150] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 303f41c0-7a19-48b2-a072-4f138f6f8156/303f41c0-7a19-48b2-a072-4f138f6f8156.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.232673] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Created folder: Project (46b9b50afee8436b91fa14327ee5382a) in parent group-v633430. [ 663.232841] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Creating folder: Instances. Parent ref: group-v633507. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.235886] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc0b1172-5b73-400c-b4ef-59fb720fdc9a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.238074] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-589f3a6f-1eed-4e5e-a42b-171560126ae2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.240278] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed87df04-1964-41a5-be07-a0f42af0b595 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.247846] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 663.247846] env[68569]: value = "task-3166626" [ 663.247846] env[68569]: _type = "Task" [ 663.247846] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.249365] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 663.249365] env[68569]: value = "task-3166625" [ 663.249365] env[68569]: _type = "Task" [ 663.249365] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.259513] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Created folder: Instances in parent group-v633507. [ 663.259777] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.261445] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.261526] env[68569]: DEBUG nova.network.neutron [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.262918] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3acece35-9e48-47d2-9d26-6b4c79a57fd1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.283722] env[68569]: DEBUG nova.network.neutron [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.288499] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.288889] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166625, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.294352] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.294352] env[68569]: value = "task-3166628" [ 663.294352] env[68569]: _type = "Task" [ 663.294352] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.302920] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166628, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.587591] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1f032b-7b48-4ee9-9140-5ce279c792c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.597026] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f05c14-d82f-495c-bbca-e52287951b3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.631021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cb84bc-5e17-423f-8816-dcaa04773213 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.639280] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee81679e-1d7a-47f0-aefc-0a763a6f0dfa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.653097] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 663.706456] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 663.761226] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143854} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.766457] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 663.767046] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166625, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.767783] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc44e446-7696-4368-a930-6b3b4da2eb58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.789831] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] b2e6de60-b4e5-4030-bca7-355d17fec06d/b2e6de60-b4e5-4030-bca7-355d17fec06d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 663.790538] env[68569]: INFO nova.compute.manager [-] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Took 0.56 seconds to deallocate network for instance. [ 663.790752] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03a99e05-3cfe-436b-9dd8-7b7f141aa247 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.819493] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 663.819493] env[68569]: value = "task-3166629" [ 663.819493] env[68569]: _type = "Task" [ 663.819493] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.822874] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166628, 'name': CreateVM_Task, 'duration_secs': 0.381427} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.825916] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.826573] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.826740] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.827057] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 663.827731] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1910a741-994d-43d1-ad58-2aac2c0cea74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.834126] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166629, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.834567] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 663.834567] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5299154b-a3eb-48e4-320e-15569acc15c9" [ 663.834567] env[68569]: _type = "Task" [ 663.834567] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.844069] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5299154b-a3eb-48e4-320e-15569acc15c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.994466] env[68569]: DEBUG nova.network.neutron [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Updated VIF entry in instance network info cache for port 441ff8db-edf3-4231-bf56-9a1c6b7b348b. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 663.994466] env[68569]: DEBUG nova.network.neutron [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Updating instance_info_cache with network_info: [{"id": "441ff8db-edf3-4231-bf56-9a1c6b7b348b", "address": "fa:16:3e:c8:1f:a9", "network": {"id": "ea44cd90-58bc-431b-8988-ffd5f0ec9136", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1472118572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46b9b50afee8436b91fa14327ee5382a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap441ff8db-ed", "ovs_interfaceid": "441ff8db-edf3-4231-bf56-9a1c6b7b348b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.157024] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527dc3a4-97a1-b3c2-8545-4b1b92cbf9e4/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 664.161554] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcf0b51-8e5b-4f96-91bc-2d498b4fd0dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.170461] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527dc3a4-97a1-b3c2-8545-4b1b92cbf9e4/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 664.170661] env[68569]: ERROR oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527dc3a4-97a1-b3c2-8545-4b1b92cbf9e4/disk-0.vmdk due to incomplete transfer. 
[ 664.170901] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a1684ce0-c60d-4407-988f-55b9b60ab76a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.178006] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527dc3a4-97a1-b3c2-8545-4b1b92cbf9e4/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 664.178268] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Uploaded image c992fabc-ac8f-45c3-9598-140c068ec2ed to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 664.180883] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 664.181118] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-388ead2b-78a2-41dd-9e8b-942f4cf2a63c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.184694] env[68569]: ERROR nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [req-0e84e0e2-26af-4bc7-a160-e991a829fd1e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0e84e0e2-26af-4bc7-a160-e991a829fd1e"}]} [ 664.189302] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 664.189302] env[68569]: value = "task-3166630" [ 664.189302] env[68569]: _type = "Task" [ 664.189302] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.200817] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166630, 'name': Destroy_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.217233] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 664.234010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.235009] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 664.235865] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 664.247888] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 664.264315] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166625, 'name': Rename_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.266732] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 664.319528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.334163] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166629, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.348707] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5299154b-a3eb-48e4-320e-15569acc15c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009602} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.349170] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.349715] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.349978] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.350144] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.350328] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.350590] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e5b0a6b-f6ea-499a-ac31-d4506877909d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.360115] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.360115] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.361241] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00783268-8a81-48b0-8472-5c48f03811de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.366540] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 664.366540] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de2f6e-215e-6c29-615f-4c7aa6d05025" [ 664.366540] env[68569]: _type = "Task" [ 664.366540] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.379984] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de2f6e-215e-6c29-615f-4c7aa6d05025, 'name': SearchDatastore_Task, 'duration_secs': 0.008109} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.380074] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb8f5a34-91be-41d7-9439-e843d5c01148 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.385598] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 664.385598] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52660459-d89e-a6eb-68f4-cd67ae13d4b8" [ 664.385598] env[68569]: _type = "Task" [ 664.385598] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.394234] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52660459-d89e-a6eb-68f4-cd67ae13d4b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.466974] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.467687] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.497189] env[68569]: DEBUG oslo_concurrency.lockutils [req-037cf02b-7e99-4421-9300-6e08a2aa8459 req-9f475fa8-92bf-4b10-9b9a-b2f73da08d7e service nova] Releasing lock "refresh_cache-f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.701844] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166630, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.767893] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166625, 'name': Rename_Task, 'duration_secs': 1.29587} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.772212] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 664.775250] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07a7a444-35e8-4dee-b9ee-03a5a7323a07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.780856] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 664.780856] env[68569]: value = "task-3166631" [ 664.780856] env[68569]: _type = "Task" [ 664.780856] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.793659] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166631, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.837530] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166629, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.881997] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e8e5da-1650-476d-8f38-2cf1110a232c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.892422] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc50efa-39fa-48d4-85db-2961a40c091f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.899633] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52660459-d89e-a6eb-68f4-cd67ae13d4b8, 'name': SearchDatastore_Task, 'duration_secs': 0.008705} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.900341] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.900670] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc/f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.900923] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19373423-531e-489a-a969-8c0b0e4882d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.931368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7789fe91-1066-4c59-bdd2-b50e8e742926 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.935268] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 664.935268] env[68569]: value = "task-3166632" [ 664.935268] env[68569]: _type = "Task" [ 664.935268] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.942099] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97defa9-77ad-4abb-ad63-39db707079ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.948768] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166632, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.960709] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 664.976985] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.976985] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.976985] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.976985] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.976985] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.976985] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.977430] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 664.977430] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 665.204349] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166630, 'name': Destroy_Task, 'duration_secs': 0.960037} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.204711] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Destroyed the VM [ 665.204959] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 665.206016] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-51f27274-a67a-4cba-ba17-1645b575ca87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.213828] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 665.213828] env[68569]: value = "task-3166633" [ 665.213828] env[68569]: _type = "Task" [ 665.213828] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.227452] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166633, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.295299] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166631, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.305952] env[68569]: DEBUG nova.compute.manager [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 665.307021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1a777b-87c9-4797-b8dd-c1e5054b65f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.342334] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166629, 'name': ReconfigVM_Task, 'duration_secs': 1.283073} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.343018] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Reconfigured VM instance instance-00000018 to attach disk [datastore1] b2e6de60-b4e5-4030-bca7-355d17fec06d/b2e6de60-b4e5-4030-bca7-355d17fec06d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.343896] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9e955eb-d922-4c21-917f-68e531624292 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.352284] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 665.352284] env[68569]: value = "task-3166634" [ 665.352284] env[68569]: _type = "Task" [ 665.352284] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.364982] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166634, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.380704] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "64146253-16ab-4d95-83c9-31b74014a040" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.381077] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "64146253-16ab-4d95-83c9-31b74014a040" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.414329] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "c634f7eb-2f71-473d-8f90-71d74edffecb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.414650] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.449414] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.449744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.449909] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166632, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493909} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.450148] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc/f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.450356] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.450599] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8db4dc06-2289-48c3-8bfd-527f980c7339 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.456665] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 665.456665] env[68569]: value = "task-3166635" [ 665.456665] env[68569]: _type = "Task" [ 665.456665] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.474189] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166635, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.481374] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.492065] env[68569]: ERROR nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] [req-bcb06f55-8591-459a-ba45-8a8947b682f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bcb06f55-8591-459a-ba45-8a8947b682f5"}]} [ 665.519040] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 665.540427] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 665.540679] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 665.554111] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 665.578652] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 665.725447] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166633, 'name': RemoveSnapshot_Task} progress is 78%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.792524] env[68569]: DEBUG oslo_vmware.api [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166631, 'name': PowerOnVM_Task, 'duration_secs': 0.587732} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.798494] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 665.798722] env[68569]: INFO nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Took 11.40 seconds to spawn the instance on the hypervisor. [ 665.798894] env[68569]: DEBUG nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 665.799904] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d30b757-7dfb-42ac-8107-b57b8c6c66f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.822433] env[68569]: INFO nova.compute.manager [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] instance snapshotting [ 665.828049] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12abff44-3348-462e-a50b-a417e46ca444 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.854459] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cf23b7-76e6-4b54-ad2c-1b11e1824509 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.870639] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166634, 'name': Rename_Task, 'duration_secs': 0.181726} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.871078] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 665.871359] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2c7add2-263b-49ea-ba02-f2f4dd62d8aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.878725] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 665.878725] env[68569]: value = "task-3166636" [ 665.878725] env[68569]: _type = "Task" [ 665.878725] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.888161] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166636, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.969221] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065923} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.970060] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.970367] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801070ec-aa3d-418c-9a9b-a76ab7f370c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.996162] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc/f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.996162] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94107220-70ab-48da-9fc9-6d972a1e541b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.018441] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 666.018441] env[68569]: value = "task-3166637" [ 666.018441] env[68569]: _type = "Task" [ 666.018441] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.028147] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166637, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.201937] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aede756b-81f5-444f-8547-c5104d71db7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.210392] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585296d5-aa8b-41ae-876b-dd70c6a4db8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.251532] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe096372-6e84-4673-b8cb-c4a81fb51057 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.262810] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68a4b3c-5ce7-4def-8976-8f561b998578 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.266794] env[68569]: DEBUG oslo_vmware.api [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166633, 'name': RemoveSnapshot_Task, 'duration_secs': 1.012096} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.267109] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 666.267372] env[68569]: INFO nova.compute.manager [None req-5e5c13b9-b6cc-4851-99ab-ea166672c3ab tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Took 17.24 seconds to snapshot the instance on the hypervisor. [ 666.279996] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.317448] env[68569]: INFO nova.compute.manager [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Took 36.40 seconds to build instance. 
[ 666.372247] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 666.372558] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-39f31d1b-d88a-40a8-86f6-e3984ac1e756 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.379854] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 666.379854] env[68569]: value = "task-3166638" [ 666.379854] env[68569]: _type = "Task" [ 666.379854] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.391074] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166636, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.394110] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.527895] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166637, 'name': ReconfigVM_Task, 'duration_secs': 0.296482} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.528181] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Reconfigured VM instance instance-00000019 to attach disk [datastore1] f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc/f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.528845] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90c5cb8e-11e2-4c9f-966b-4164135567ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.534806] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 666.534806] env[68569]: value = "task-3166639" [ 666.534806] env[68569]: _type = "Task" [ 666.534806] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.542209] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166639, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.814495] env[68569]: DEBUG nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 58 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 666.814766] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 58 to 59 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 666.814945] env[68569]: DEBUG nova.compute.provider_tree [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.820847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0c502248-3e67-4413-b786-0d4fbaf34b44 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.912s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.889918] env[68569]: DEBUG oslo_vmware.api [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166636, 'name': PowerOnVM_Task, 'duration_secs': 0.975469} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.890634] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 666.890891] env[68569]: INFO nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Took 9.74 seconds to spawn the instance on the hypervisor. [ 666.891106] env[68569]: DEBUG nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 666.891943] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbf676d-bbbd-4be0-9410-f4fc0c9dd77d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.897665] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166638, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.045303] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166639, 'name': Rename_Task, 'duration_secs': 0.366444} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.045983] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.045983] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6aeb25f-7af3-453f-bd2f-5722bbebe929 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.052180] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 667.052180] env[68569]: value = "task-3166640" [ 667.052180] env[68569]: _type = "Task" [ 667.052180] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.060126] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.321017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.611s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.323433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.324s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.324901] env[68569]: INFO nova.compute.claims [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.331047] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 667.357299] env[68569]: INFO nova.scheduler.client.report [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Deleted allocations for instance 7129a57f-e639-49ae-96a9-3c1d966034a8 [ 667.392709] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166638, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.413368] env[68569]: INFO nova.compute.manager [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Took 36.09 seconds to build instance. [ 667.563981] env[68569]: DEBUG oslo_vmware.api [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166640, 'name': PowerOnVM_Task, 'duration_secs': 0.461687} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.564448] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.564930] env[68569]: INFO nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Took 7.67 seconds to spawn the instance on the hypervisor. [ 667.565247] env[68569]: DEBUG nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.567498] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1dec1b-ca6c-4f18-8aa4-0f62afe68167 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.697845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "303f41c0-7a19-48b2-a072-4f138f6f8156" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.697845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.697845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "303f41c0-7a19-48b2-a072-4f138f6f8156-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.697845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.698294] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.699135] env[68569]: INFO nova.compute.manager [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Terminating instance [ 667.857809] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.870515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-328e2343-9d46-4b20-b6eb-4536a7350427 tempest-FloatingIPsAssociationNegativeTestJSON-608750487 tempest-FloatingIPsAssociationNegativeTestJSON-608750487-project-member] Lock "7129a57f-e639-49ae-96a9-3c1d966034a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.264s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.896629] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166638, 'name': CreateSnapshot_Task, 'duration_secs': 1.026713} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.897073] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 667.897943] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d597fba-2414-4f7e-bc1c-219acf840d8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.923011] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9ef90506-5aeb-4132-879a-7e38edd9e025 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.613s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.017575] env[68569]: DEBUG nova.compute.manager [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.019356] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1461d9-3f9b-4dc3-87d7-5ceacb833b54 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.041770] env[68569]: DEBUG nova.compute.manager [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Received event network-changed-192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 668.041770] env[68569]: DEBUG nova.compute.manager [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Refreshing instance network info cache due to event network-changed-192a6986-c831-42da-bce9-c4bde617262e. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 668.041770] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] Acquiring lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.041770] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] Acquired lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.041770] env[68569]: DEBUG nova.network.neutron [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Refreshing network info cache for port 192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 668.083683] env[68569]: INFO nova.compute.manager [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Took 36.03 seconds to build instance. [ 668.203187] env[68569]: DEBUG nova.compute.manager [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.203443] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.204469] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725fe9f4-2def-416d-bfec-a542251daaad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.215426] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.215816] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53d96299-0f94-4f7b-a166-1c623558d50c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.221905] env[68569]: DEBUG oslo_vmware.api [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 668.221905] env[68569]: value = "task-3166641" [ 668.221905] env[68569]: _type = "Task" [ 668.221905] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.232581] env[68569]: DEBUG oslo_vmware.api [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.418026] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 668.419569] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2a0eab2-aa41-4f29-b239-bf645bb699ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.425417] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.429770] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 668.429770] env[68569]: value = "task-3166642" [ 668.429770] env[68569]: _type = "Task" [ 668.429770] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.438369] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166642, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.463293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.534106] env[68569]: INFO nova.compute.manager [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] instance snapshotting [ 668.539070] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec06b963-6376-4558-81a1-786a82df5753 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.571623] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397b2d5e-e529-4150-8ee7-ce5987406d8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.588618] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4078530e-5131-454a-85c5-238aceef6a88 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.547s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.590966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.127s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.590966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.590966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.590966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.597019] env[68569]: INFO nova.compute.manager [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Terminating instance [ 668.734187] env[68569]: DEBUG oslo_vmware.api [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166641, 'name': PowerOffVM_Task, 'duration_secs': 0.364607} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.734385] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.734669] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.735310] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40dbea1e-5853-4910-a36b-73adcabccb4b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.793739] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.796021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.796021] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleting the datastore file [datastore2] 303f41c0-7a19-48b2-a072-4f138f6f8156 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.796021] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eff906d8-00e9-4778-9b88-cc06fee22fb2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.801480] env[68569]: DEBUG oslo_vmware.api [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 668.801480] env[68569]: value = "task-3166644" [ 668.801480] env[68569]: _type = "Task" [ 668.801480] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.816532] env[68569]: DEBUG oslo_vmware.api [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.947898] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166642, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.952011] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.062019] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82424c95-d283-4b49-be82-6d47b338b5a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.067891] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3921d4d-efde-4808-8553-06dbeae4f23a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.098980] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 669.099656] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.102361] env[68569]: DEBUG nova.compute.manager [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 669.102709] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 669.103046] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8a2dbc3c-26b0-44bb-a032-50db3a4143d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.105546] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03cbd68-b442-4aab-a20d-4431f2fa525e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.108980] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32df79c1-5c03-4bcf-8aef-0575b5891106 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.120980] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee7939b-0309-413d-8cab-d4f41eb451b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.125277] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.126200] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 669.126200] env[68569]: value = "task-3166645" [ 669.126200] env[68569]: _type = "Task" [ 669.126200] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.126609] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79ef323d-a0da-48eb-aee0-5a9ecc14fd28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.144565] env[68569]: DEBUG nova.compute.provider_tree [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.149783] env[68569]: DEBUG oslo_vmware.api [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 669.149783] env[68569]: value = "task-3166646" [ 669.149783] env[68569]: _type = "Task" [ 669.149783] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.154155] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166645, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.166889] env[68569]: DEBUG oslo_vmware.api [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.238097] env[68569]: DEBUG nova.network.neutron [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Updated VIF entry in instance network info cache for port 192a6986-c831-42da-bce9-c4bde617262e. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 669.238550] env[68569]: DEBUG nova.network.neutron [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Updating instance_info_cache with network_info: [{"id": "192a6986-c831-42da-bce9-c4bde617262e", "address": "fa:16:3e:df:04:4a", "network": {"id": "0ce7bcdf-220a-4073-978c-559b6c03369d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1220421608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "128ddc15398c4aadb4212393c51670dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192a6986-c8", "ovs_interfaceid": "192a6986-c831-42da-bce9-c4bde617262e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.326853] env[68569]: DEBUG oslo_vmware.api [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149172} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.326853] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.326853] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.326853] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.326853] env[68569]: INFO nova.compute.manager [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 669.327803] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.327803] env[68569]: DEBUG nova.compute.manager [-] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 669.327803] env[68569]: DEBUG nova.network.neutron [-] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.444888] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166642, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.643027] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166645, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.644424] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.651172] env[68569]: DEBUG nova.scheduler.client.report [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.664419] env[68569]: DEBUG oslo_vmware.api [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166646, 'name': PowerOffVM_Task, 'duration_secs': 0.21145} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.664830] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.667473] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.667473] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89fa7a33-bba0-4495-b8d1-087fd38b7452 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.725184] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 669.725184] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 669.725184] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Deleting the datastore file [datastore1] f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.725184] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f000312-facf-4b5c-b5d4-ca291357cc11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.733806] env[68569]: DEBUG oslo_vmware.api [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for the task: (returnval){ [ 669.733806] env[68569]: value = "task-3166648" [ 669.733806] env[68569]: _type = "Task" [ 669.733806] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.742528] env[68569]: DEBUG oslo_concurrency.lockutils [req-0ddfbd4b-6bbf-4a3c-90aa-51e639cc3ce4 req-ca9e39c7-04ed-4a1b-82ed-1454a95a364c service nova] Releasing lock "refresh_cache-b2e6de60-b4e5-4030-bca7-355d17fec06d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.743020] env[68569]: DEBUG oslo_vmware.api [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166648, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.945345] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166642, 'name': CloneVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.070090] env[68569]: DEBUG nova.compute.manager [req-6ca505b3-7f4b-4f4c-a1d1-f67822243b62 req-047d4742-4b60-4e47-9978-a100c80cb0aa service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Received event network-vif-deleted-43722f3d-a2c3-44a7-9b31-b2477b2d126e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 670.070299] env[68569]: INFO nova.compute.manager [req-6ca505b3-7f4b-4f4c-a1d1-f67822243b62 req-047d4742-4b60-4e47-9978-a100c80cb0aa service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Neutron deleted interface 43722f3d-a2c3-44a7-9b31-b2477b2d126e; detaching it from the instance and deleting it from the info cache [ 670.070478] env[68569]: DEBUG nova.network.neutron [req-6ca505b3-7f4b-4f4c-a1d1-f67822243b62 req-047d4742-4b60-4e47-9978-a100c80cb0aa service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.139652] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166645, 'name': CreateSnapshot_Task, 'duration_secs': 0.524958} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.139920] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 670.141204] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becaa287-6991-425a-a8d1-5ab65a00d1b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.158865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.835s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.159419] env[68569]: DEBUG nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.162979] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.398s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.164473] env[68569]: INFO nova.compute.claims [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.199335] env[68569]: DEBUG nova.network.neutron [-] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.243478] env[68569]: DEBUG oslo_vmware.api [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Task: {'id': task-3166648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198514} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.244326] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.244326] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 670.244463] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.244605] env[68569]: INFO nova.compute.manager [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 670.244849] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.245030] env[68569]: DEBUG nova.compute.manager [-] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 670.245120] env[68569]: DEBUG nova.network.neutron [-] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.449912] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166642, 'name': CloneVM_Task, 'duration_secs': 1.594797} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.449912] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Created linked-clone VM from snapshot [ 670.450343] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33217e1a-8e25-4f32-8d97-828864a177a5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.458260] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Uploading image 228c9055-c5ae-4140-a31b-4221f8cbc68f {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 670.470046] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 670.470326] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e2c678b8-076c-4c62-969c-dc1e3aace602 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.478378] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 670.478378] env[68569]: value = "task-3166649" [ 670.478378] env[68569]: _type = "Task" [ 670.478378] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.486455] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166649, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.573874] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e7790e7-6d04-454e-8730-2e3f47b1c685 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.584060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de32fde0-bb73-43f9-882b-fe018acb8098 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.617995] env[68569]: DEBUG nova.compute.manager [req-6ca505b3-7f4b-4f4c-a1d1-f67822243b62 req-047d4742-4b60-4e47-9978-a100c80cb0aa service nova] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Detach interface failed, port_id=43722f3d-a2c3-44a7-9b31-b2477b2d126e, reason: Instance 303f41c0-7a19-48b2-a072-4f138f6f8156 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 670.661066] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 670.661421] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-40e608ae-b3b9-4ac2-b4a9-3704f29b5bda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.665862] env[68569]: DEBUG nova.compute.utils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 670.667622] env[68569]: DEBUG nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.667948] env[68569]: DEBUG nova.network.neutron [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 670.675626] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 670.675626] env[68569]: value = "task-3166650" [ 670.675626] env[68569]: _type = "Task" [ 670.675626] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.688027] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166650, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.702501] env[68569]: INFO nova.compute.manager [-] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Took 1.39 seconds to deallocate network for instance. 
[ 670.743129] env[68569]: DEBUG nova.policy [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2762206641740db911bb37f706e754a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94334816cb6c442c8a06f3bd8917655b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 670.991537] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166649, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.040182] env[68569]: DEBUG nova.network.neutron [-] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.096819] env[68569]: DEBUG nova.network.neutron [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Successfully created port: e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.169224] env[68569]: DEBUG nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.195293] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166650, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.211512] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.490636] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166649, 'name': Destroy_Task, 'duration_secs': 0.760366} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.490956] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Destroyed the VM [ 671.491248] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 671.493761] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9a4ade6f-5634-4a6c-89e7-a272569ad4e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.501739] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 671.501739] env[68569]: value = "task-3166651" [ 671.501739] env[68569]: _type = "Task" [ 671.501739] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.513091] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166651, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.546028] env[68569]: INFO nova.compute.manager [-] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Took 1.30 seconds to deallocate network for instance. [ 671.690886] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166650, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.745175] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021d73b1-2c7c-4a32-9c34-4dce19018d48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.752321] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35851042-c832-4488-a107-d17657d9d215 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.786060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcc79b3-e776-4ce8-8e6d-6db30f03f7f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.794229] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff6db2b-055c-4b5f-890e-4090a8178c90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.808826] env[68569]: DEBUG nova.compute.provider_tree [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.012038] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166651, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.052440] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.101399] env[68569]: DEBUG nova.compute.manager [req-6a3a792f-037f-4fc4-88b9-33055927097e req-f1f49c73-5242-46e0-8b98-2edbac9ade8f service nova] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Received event network-vif-deleted-441ff8db-edf3-4231-bf56-9a1c6b7b348b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 672.179540] env[68569]: DEBUG nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.194450] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166650, 'name': CloneVM_Task, 'duration_secs': 1.310542} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.194757] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Created linked-clone VM from snapshot [ 672.195637] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c1a79f-9fe7-46be-adcf-90f80ae758d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.206309] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Uploading image e3f0a360-85ab-43a5-be34-ec8eac9fa1ca {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 672.222699] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.222961] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.225566] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.225566] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.225566] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.225566] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 
tempest-DeleteServersAdminTestJSON-268117276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.225566] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.225860] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 672.225860] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.226046] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.226182] env[68569]: DEBUG nova.virt.hardware [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.228692] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bdcf974-96d0-4e92-9156-72cc6b7b6088 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.244026] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 672.244026] env[68569]: value = "vm-633513" [ 672.244026] env[68569]: _type = "VirtualMachine" [ 672.244026] env[68569]: }. 
{{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 672.245666] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0243fbcb-5e49-41c4-ac1f-5e7e03b65848 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.253059] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-55f71c4f-7177-4c34-be6d-9c4a1f0cba35 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.269857] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lease: (returnval){ [ 672.269857] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e011ad-b0ad-1333-0bc9-56cfa8661e01" [ 672.269857] env[68569]: _type = "HttpNfcLease" [ 672.269857] env[68569]: } obtained for exporting VM: (result){ [ 672.269857] env[68569]: value = "vm-633513" [ 672.269857] env[68569]: _type = "VirtualMachine" [ 672.269857] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 672.270272] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the lease: (returnval){ [ 672.270272] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e011ad-b0ad-1333-0bc9-56cfa8661e01" [ 672.270272] env[68569]: _type = "HttpNfcLease" [ 672.270272] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 672.277091] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 672.277091] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e011ad-b0ad-1333-0bc9-56cfa8661e01" [ 672.277091] env[68569]: _type = "HttpNfcLease" [ 672.277091] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 672.277347] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 672.277347] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e011ad-b0ad-1333-0bc9-56cfa8661e01" [ 672.277347] env[68569]: _type = "HttpNfcLease" [ 672.277347] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 672.278114] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a812e4d-52fb-49b8-aec5-d3bd45d6598e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.285837] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee2684-1183-b97e-b911-68900f530984/disk-0.vmdk from lease info. 
{{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 672.286065] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee2684-1183-b97e-b911-68900f530984/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 672.345188] env[68569]: DEBUG nova.scheduler.client.report [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.407363] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a447c6c-c72c-456a-91fb-a0150dffac1d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.515178] env[68569]: DEBUG oslo_vmware.api [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166651, 'name': RemoveSnapshot_Task, 'duration_secs': 0.983349} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.515178] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 672.765313] env[68569]: DEBUG nova.network.neutron [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Successfully updated port: e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.853260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.853851] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.857181] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.590s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.858737] env[68569]: INFO nova.compute.claims [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.021676] env[68569]: WARNING nova.compute.manager [None req-407fe1ef-4311-4cd0-bcce-e0282fd93765 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Image not found during snapshot: nova.exception.ImageNotFound: Image 228c9055-c5ae-4140-a31b-4221f8cbc68f could not be found. [ 673.272525] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "refresh_cache-a29854f9-0096-4b01-9350-bfddee84e2c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.272525] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired lock "refresh_cache-a29854f9-0096-4b01-9350-bfddee84e2c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.272525] env[68569]: DEBUG nova.network.neutron [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.363314] env[68569]: DEBUG nova.compute.utils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 673.367013] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 673.367190] env[68569]: DEBUG nova.network.neutron [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.418690] env[68569]: DEBUG nova.policy [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5513dfadc9c745d1bd5953f15fd1ee2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '378a536d9ff14836aa7f971498835b24', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.818769] env[68569]: DEBUG nova.network.neutron [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.868698] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 673.986277] env[68569]: DEBUG nova.network.neutron [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Successfully created port: b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.135936] env[68569]: DEBUG nova.network.neutron [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Updating instance_info_cache with network_info: [{"id": "e3224bc8-bde5-46e7-aad2-c141a0c0b7b7", "address": "fa:16:3e:30:23:78", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3224bc8-bd", "ovs_interfaceid": "e3224bc8-bde5-46e7-aad2-c141a0c0b7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.209497] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.209814] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.210064] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.210492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 
tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.210662] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.216134] env[68569]: INFO nova.compute.manager [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Terminating instance [ 674.376653] env[68569]: INFO nova.virt.block_device [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Booting with volume b88d036e-c8ce-4222-807f-25716446b927 at /dev/sda [ 674.455482] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01eacec2-c681-4514-98cc-7020842d9846 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.466759] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33b9b62-ba51-46ca-b82a-633db4a0190d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.483388] env[68569]: DEBUG nova.compute.manager [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Received event network-vif-plugged-e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 674.483617] env[68569]: DEBUG oslo_concurrency.lockutils [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] Acquiring lock "a29854f9-0096-4b01-9350-bfddee84e2c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.483801] env[68569]: DEBUG oslo_concurrency.lockutils [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.483980] env[68569]: DEBUG oslo_concurrency.lockutils [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.484152] env[68569]: DEBUG nova.compute.manager [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] No waiting events found dispatching network-vif-plugged-e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.484313] env[68569]: WARNING nova.compute.manager [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Received unexpected event network-vif-plugged-e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 for instance with vm_state building and task_state spawning. [ 674.484474] env[68569]: DEBUG nova.compute.manager [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Received event network-changed-e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 674.484607] env[68569]: DEBUG nova.compute.manager [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Refreshing instance network info cache due to event network-changed-e3224bc8-bde5-46e7-aad2-c141a0c0b7b7. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 674.484770] env[68569]: DEBUG oslo_concurrency.lockutils [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] Acquiring lock "refresh_cache-a29854f9-0096-4b01-9350-bfddee84e2c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.532543] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4c9b1af-e20f-4196-a876-1cbc99f62d41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.543534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51068c2f-e31b-4043-a650-f7686c89192e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.585825] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39770cc-08c5-4b24-9edb-dbf46b5761dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.593760] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5f8455-c3fa-471e-950b-7aa07dc0d407 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.611663] env[68569]: DEBUG nova.virt.block_device [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updating existing volume attachment record: 5b42506a-dc6d-4991-9c66-f9f60bbcda29 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 674.616235] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0002e31-ff05-46f8-be0c-118c75eb1532 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.625393] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa63d767-cc8e-4dd4-8f9e-42a369122c6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.662668] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Releasing lock "refresh_cache-a29854f9-0096-4b01-9350-bfddee84e2c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.663172] env[68569]: DEBUG nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Instance network_info: |[{"id": "e3224bc8-bde5-46e7-aad2-c141a0c0b7b7", "address": "fa:16:3e:30:23:78", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3224bc8-bd", "ovs_interfaceid": "e3224bc8-bde5-46e7-aad2-c141a0c0b7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 674.663811] env[68569]: DEBUG oslo_concurrency.lockutils [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] Acquired lock "refresh_cache-a29854f9-0096-4b01-9350-bfddee84e2c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.664068] env[68569]: DEBUG nova.network.neutron [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Refreshing network info cache for port e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.665513] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:23:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3224bc8-bde5-46e7-aad2-c141a0c0b7b7', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.673647] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.677199] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0465ce3-7191-45f3-8cd3-4558b87873f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.681391] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.681788] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7a16e06-e533-45e8-98b5-cec4688743f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.706129] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7ff23e-53d5-43a0-9f11-9f598356120b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.712502] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.712502] env[68569]: value = "task-3166653" [ 674.712502] env[68569]: _type = "Task" [ 674.712502] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.726768] env[68569]: DEBUG nova.compute.manager [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.727050] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.730060] env[68569]: DEBUG nova.compute.provider_tree [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.730060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e322aa66-5b4e-4fbe-a0c6-93fa3e7f60dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.740359] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166653, 'name': CreateVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.748521] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.748934] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c13712a2-775c-4efa-9a07-466b85b89fb9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.758078] env[68569]: DEBUG oslo_vmware.api [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 674.758078] env[68569]: value = "task-3166654" [ 674.758078] env[68569]: _type = "Task" [ 674.758078] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.768637] env[68569]: DEBUG oslo_vmware.api [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166654, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.993348] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "98efdafe-e02b-46ca-a701-b70042513128" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.993599] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "98efdafe-e02b-46ca-a701-b70042513128" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.228341] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166653, 'name': CreateVM_Task, 'duration_secs': 0.442532} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.228559] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.229279] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.229487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.229882] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.230204] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-401a095a-55d7-4f2f-a5c4-74d4119bb83c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.234026] env[68569]: DEBUG nova.scheduler.client.report [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 675.239014] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 675.239014] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52571bcf-6720-44ea-d0b3-f204794c3e69" [ 675.239014] env[68569]: _type = "Task" [ 675.239014] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.249181] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52571bcf-6720-44ea-d0b3-f204794c3e69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.271312] env[68569]: DEBUG oslo_vmware.api [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166654, 'name': PowerOffVM_Task, 'duration_secs': 0.249171} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.271701] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 675.271779] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 675.272093] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-428348d6-3a02-47e9-8397-55c8c64985f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.342371] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 675.342668] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 675.342919] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleting the datastore file [datastore1] 239c1217-ba8e-474a-b02c-7d85e3ac92f4 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 675.343179] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd7c3606-8756-4007-a322-72b11c88bb88 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.355795] env[68569]: DEBUG oslo_vmware.api [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 675.355795] env[68569]: value = "task-3166656" [ 675.355795] env[68569]: _type = "Task" [ 675.355795] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.366036] env[68569]: DEBUG oslo_vmware.api [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.422467] env[68569]: DEBUG nova.network.neutron [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Updated VIF entry in instance network info cache for port e3224bc8-bde5-46e7-aad2-c141a0c0b7b7. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.422953] env[68569]: DEBUG nova.network.neutron [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Updating instance_info_cache with network_info: [{"id": "e3224bc8-bde5-46e7-aad2-c141a0c0b7b7", "address": "fa:16:3e:30:23:78", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3224bc8-bd", "ovs_interfaceid": "e3224bc8-bde5-46e7-aad2-c141a0c0b7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.740704] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.883s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.741320] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 675.746138] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.250s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.746138] env[68569]: DEBUG nova.objects.instance [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lazy-loading 'resources' on Instance uuid 1c8dfb47-df19-4101-8d4e-30889d71d7da {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 675.755807] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52571bcf-6720-44ea-d0b3-f204794c3e69, 'name': SearchDatastore_Task, 'duration_secs': 0.013182} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.756685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.756918] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.757166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.757314] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.757503] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.758018] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fed24d6-8fa1-400a-b495-c985a06bfbc0 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.768122] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.768122] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.768861] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d129bbb3-10d0-40f0-a31b-aaa33c2a0fc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.775932] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 675.775932] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5261e93a-7c6f-c289-897d-1ac8d7dd5f79" [ 675.775932] env[68569]: _type = "Task" [ 675.775932] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.788819] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5261e93a-7c6f-c289-897d-1ac8d7dd5f79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.789828] env[68569]: DEBUG nova.network.neutron [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Successfully updated port: b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 675.873057] env[68569]: DEBUG oslo_vmware.api [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163804} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.873057] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.873057] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.873057] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.873057] env[68569]: INFO nova.compute.manager [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 675.873374] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.873912] env[68569]: DEBUG nova.compute.manager [-] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.873912] env[68569]: DEBUG nova.network.neutron [-] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.925866] env[68569]: DEBUG oslo_concurrency.lockutils [req-585778d5-bb1e-4c4a-9f54-c36d21df0787 req-918cf356-a65d-4c31-8d84-236201b56b4f service nova] Releasing lock "refresh_cache-a29854f9-0096-4b01-9350-bfddee84e2c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.250993] env[68569]: DEBUG nova.compute.utils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 676.255541] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 676.255541] env[68569]: DEBUG nova.network.neutron [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.288668] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5261e93a-7c6f-c289-897d-1ac8d7dd5f79, 'name': SearchDatastore_Task, 'duration_secs': 0.012855} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.293982] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cffb3b26-6990-46d2-8e92-23931bace191 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.299743] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquiring lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.299902] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquired lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.300067] env[68569]: DEBUG nova.network.neutron [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.305141] env[68569]: DEBUG nova.policy [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a04470ed28074c2180e39df4ff9d9b5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef27055c27a04f7e9199b9c02efa7fcf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.311654] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 676.311654] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5223afea-67a0-49b1-a172-243a2cb8f86a" [ 676.311654] env[68569]: _type = "Task" [ 676.311654] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.330891] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5223afea-67a0-49b1-a172-243a2cb8f86a, 'name': SearchDatastore_Task, 'duration_secs': 0.01821} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.331348] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.331612] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] a29854f9-0096-4b01-9350-bfddee84e2c2/a29854f9-0096-4b01-9350-bfddee84e2c2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.332169] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d868f103-636f-401c-8140-e416ae91f8d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.343259] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 676.343259] env[68569]: value = "task-3166657" [ 676.343259] env[68569]: _type = "Task" [ 676.343259] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.355721] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166657, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.510898] env[68569]: DEBUG nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Received event network-vif-plugged-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 676.511142] env[68569]: DEBUG oslo_concurrency.lockutils [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] Acquiring lock "2cde3729-1be6-42c5-891f-42a7a8bff267-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.511356] env[68569]: DEBUG oslo_concurrency.lockutils [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.511530] env[68569]: DEBUG oslo_concurrency.lockutils [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.511677] env[68569]: DEBUG nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] No waiting events found dispatching network-vif-plugged-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 676.511924] env[68569]: WARNING nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Received unexpected event network-vif-plugged-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 for instance with vm_state building and task_state spawning. [ 676.512170] env[68569]: DEBUG nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Received event network-changed-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 676.512309] env[68569]: DEBUG nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Refreshing instance network info cache due to event network-changed-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 676.512445] env[68569]: DEBUG oslo_concurrency.lockutils [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] Acquiring lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.654400] env[68569]: DEBUG nova.network.neutron [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Successfully created port: f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.664676] env[68569]: DEBUG nova.network.neutron [-] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.734938] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 676.735585] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 676.735808] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.735961] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 676.736158] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.736365] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 676.736557] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 676.736925] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 676.737027] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 676.738050] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 676.738050] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 676.738050] env[68569]: DEBUG nova.virt.hardware [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 676.738990] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c650aa-fed8-49f1-b7ee-11d7b434f980 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.749209] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e676fd-85f5-40fb-af89-99567350bd6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.756563] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 676.859834] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166657, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.899474] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acf92a5-e515-4e01-aa83-382b66d3d3ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.908796] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39439ede-2400-4eb4-904d-540ddf923c79 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.945275] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cb30ae-afb9-453d-96c8-3828a1216d6e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.954636] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c511f7f-37b3-4054-a38b-0440a0444ef9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.970011] env[68569]: DEBUG nova.compute.provider_tree [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.090992] env[68569]: DEBUG nova.network.neutron [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.166550] env[68569]: INFO nova.compute.manager [-] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Took 1.29 seconds to deallocate network for instance. [ 677.357465] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571796} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.358557] env[68569]: DEBUG nova.network.neutron [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updating instance_info_cache with network_info: [{"id": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "address": "fa:16:3e:17:3e:41", "network": {"id": "c0d55a28-4420-4fa0-85dc-7e503881d235", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1758956017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "378a536d9ff14836aa7f971498835b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8051e6d-d6", "ovs_interfaceid": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.359769] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] a29854f9-0096-4b01-9350-bfddee84e2c2/a29854f9-0096-4b01-9350-bfddee84e2c2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.360041] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.360510] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b34d8fb4-71f2-47c2-bad6-3ae34e8c80b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.371365] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 677.371365] env[68569]: value = "task-3166658" [ 677.371365] env[68569]: _type = "Task" [ 677.371365] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.383707] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.473701] env[68569]: DEBUG nova.scheduler.client.report [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.675597] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.768588] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 677.791557] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 677.791790] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.791943] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 677.792136] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.792282] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 677.792426] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 677.792630] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 677.792787] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 677.793032] env[68569]: DEBUG nova.virt.hardware [None 
req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 677.793221] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 677.793394] env[68569]: DEBUG nova.virt.hardware [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 677.794804] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262de24f-a0f4-45d3-8783-907221a2adda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.803416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affc1f9f-c8af-4dd9-927a-926a02d38503 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.861725] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Releasing lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.862028] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Instance network_info: |[{"id": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "address": "fa:16:3e:17:3e:41", "network": {"id": "c0d55a28-4420-4fa0-85dc-7e503881d235", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1758956017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "378a536d9ff14836aa7f971498835b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8051e6d-d6", "ovs_interfaceid": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 677.862356] env[68569]: DEBUG oslo_concurrency.lockutils [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 
req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] Acquired lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.862541] env[68569]: DEBUG nova.network.neutron [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Refreshing network info cache for port b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 677.863836] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:3e:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496ac502-bfc4-4324-8332-cac473eb7cc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.871667] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Creating folder: Project (378a536d9ff14836aa7f971498835b24). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.874904] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-113bd448-6e5e-49ff-b2c9-edbe6b2b861d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.886125] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103963} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.886212] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.886949] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8638d8-fb80-4f3b-a5ac-1657367f9565 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.920412] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] a29854f9-0096-4b01-9350-bfddee84e2c2/a29854f9-0096-4b01-9350-bfddee84e2c2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.925690] env[68569]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 677.925844] env[68569]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68569) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 677.926185] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da1602a4-f1b9-49f7-ba5f-c2f3c1cda5e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.950549] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Folder already exists: Project (378a536d9ff14836aa7f971498835b24). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 677.950844] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Creating folder: Instances. Parent ref: group-v633468. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.951634] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c18d04cc-df70-4eb4-bc43-ce161d7f68eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.964608] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 677.964608] env[68569]: value = "task-3166661" [ 677.964608] env[68569]: _type = "Task" [ 677.964608] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.970185] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Created folder: Instances in parent group-v633468. [ 677.970441] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 677.971128] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 677.971414] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c525ef76-ef50-46da-ba3b-1142e324040f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.990056] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.246s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.992644] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.995499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.725s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.997038] env[68569]: INFO nova.compute.claims [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.006844] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.006844] env[68569]: value = "task-3166662" [ 678.006844] env[68569]: _type = "Task" [ 678.006844] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.016559] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166662, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.019585] env[68569]: INFO nova.scheduler.client.report [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Deleted allocations for instance 1c8dfb47-df19-4101-8d4e-30889d71d7da [ 678.155089] env[68569]: DEBUG nova.network.neutron [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updated VIF entry in instance network info cache for port b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 678.155558] env[68569]: DEBUG nova.network.neutron [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updating instance_info_cache with network_info: [{"id": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "address": "fa:16:3e:17:3e:41", "network": {"id": "c0d55a28-4420-4fa0-85dc-7e503881d235", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1758956017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "378a536d9ff14836aa7f971498835b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8051e6d-d6", "ovs_interfaceid": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.476142] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166661, 'name': ReconfigVM_Task, 'duration_secs': 0.375146} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.476483] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Reconfigured VM instance instance-0000001a to attach disk [datastore2] a29854f9-0096-4b01-9350-bfddee84e2c2/a29854f9-0096-4b01-9350-bfddee84e2c2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.477233] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c531b0ed-34ea-4f63-9261-6b8e16dccd11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.486595] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 678.486595] env[68569]: value = "task-3166663" [ 678.486595] env[68569]: _type = "Task" [ 678.486595] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.496362] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166663, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.518461] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166662, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.531224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bb6de4b1-3a7f-4c9c-a1a4-c7ea7abe5ca1 tempest-ServerTagsTestJSON-795348598 tempest-ServerTagsTestJSON-795348598-project-member] Lock "1c8dfb47-df19-4101-8d4e-30889d71d7da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.530s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.596414] env[68569]: DEBUG nova.compute.manager [req-604a60db-6875-4ea1-8585-14d6627c115e req-9960ff4c-2a76-4ee7-89b6-42d1d6047ade service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Received event network-vif-plugged-f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 678.596637] env[68569]: DEBUG oslo_concurrency.lockutils [req-604a60db-6875-4ea1-8585-14d6627c115e req-9960ff4c-2a76-4ee7-89b6-42d1d6047ade service nova] Acquiring lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.596844] env[68569]: DEBUG oslo_concurrency.lockutils [req-604a60db-6875-4ea1-8585-14d6627c115e req-9960ff4c-2a76-4ee7-89b6-42d1d6047ade service nova] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.597012] env[68569]: DEBUG oslo_concurrency.lockutils [req-604a60db-6875-4ea1-8585-14d6627c115e req-9960ff4c-2a76-4ee7-89b6-42d1d6047ade service nova] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.597322] env[68569]: DEBUG nova.compute.manager [req-604a60db-6875-4ea1-8585-14d6627c115e req-9960ff4c-2a76-4ee7-89b6-42d1d6047ade service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] No waiting events found dispatching network-vif-plugged-f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 678.597495] env[68569]: WARNING nova.compute.manager [req-604a60db-6875-4ea1-8585-14d6627c115e req-9960ff4c-2a76-4ee7-89b6-42d1d6047ade service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Received unexpected event network-vif-plugged-f467ee4f-f180-4173-a745-c02a31527645 for instance with vm_state building and task_state spawning. 
[ 678.661362] env[68569]: DEBUG oslo_concurrency.lockutils [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] Releasing lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.661635] env[68569]: DEBUG nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Received event network-vif-deleted-7ec66ae2-2713-4784-8a3e-bba562877a03 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 678.661810] env[68569]: INFO nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Neutron deleted interface 7ec66ae2-2713-4784-8a3e-bba562877a03; detaching it from the instance and deleting it from the info cache [ 678.661980] env[68569]: DEBUG nova.network.neutron [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.846347] env[68569]: DEBUG nova.network.neutron [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Successfully updated port: f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 679.001738] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166663, 'name': Rename_Task, 'duration_secs': 0.182371} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.002121] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.002418] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1309a6cd-71a2-4aa0-9358-44efaa9aa957 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.012979] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 679.012979] env[68569]: value = "task-3166664" [ 679.012979] env[68569]: _type = "Task" [ 679.012979] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.020031] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166662, 'name': CreateVM_Task, 'duration_secs': 0.852225} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.020735] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.021503] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633474', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'name': 'volume-b88d036e-c8ce-4222-807f-25716446b927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2cde3729-1be6-42c5-891f-42a7a8bff267', 'attached_at': '', 'detached_at': '', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'serial': 'b88d036e-c8ce-4222-807f-25716446b927'}, 'delete_on_termination': True, 'disk_bus': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'guest_format': None, 'attachment_id': '5b42506a-dc6d-4991-9c66-f9f60bbcda29', 'volume_type': None}], 'swap': None} {{(pid=68569) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 679.021807] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Root volume attach. Driver type: vmdk {{(pid=68569) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 679.022680] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d06617-d4f4-422f-a27c-6b8cf29e1d3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.029446] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166664, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.041770] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b923982-4152-4f2e-a5ec-068c1135a185 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.049999] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8be89df-dfa5-472b-bb44-62aeb0f73053 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.060384] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-928e72d6-d623-4241-83ad-e644e74f7617 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.069941] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 679.069941] env[68569]: value = "task-3166665" [ 679.069941] env[68569]: _type = "Task" [ 679.069941] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.085577] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.166993] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32d21a09-b153-4ccb-8dd8-7142c7b88a60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.177868] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f438770e-73fc-468e-9a93-b1f86ef66d1b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.222040] env[68569]: DEBUG nova.compute.manager [req-377fe890-850a-4ac5-b1eb-ed51731c7af3 req-d04e633f-45bc-4b00-81a6-e0a3c5375854 service nova] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Detach interface failed, port_id=7ec66ae2-2713-4784-8a3e-bba562877a03, reason: Instance 239c1217-ba8e-474a-b02c-7d85e3ac92f4 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 679.350872] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.352629] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.352629] env[68569]: DEBUG nova.network.neutron [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.530260] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166664, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.588683] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 42%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.745324] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609daa54-9941-4b7f-8b19-507c7c7161e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.755380] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d2f494-2245-4e03-8f5f-83d8296df4bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.793313] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc63a85-0e89-4054-b96f-0392b745cff4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.803705] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd3b583-cd70-4745-9664-c67563925e0a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.822122] env[68569]: DEBUG nova.compute.provider_tree [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.912074] env[68569]: DEBUG nova.network.neutron [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.034354] env[68569]: DEBUG oslo_vmware.api [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166664, 'name': PowerOnVM_Task, 'duration_secs': 0.973322} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.034701] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 680.034941] env[68569]: INFO nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Took 7.86 seconds to spawn the instance on the hypervisor. 
[ 680.035162] env[68569]: DEBUG nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 680.039884] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb1a4e9-bcfb-48c3-978c-223ca10ada48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.085920] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 54%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.328033] env[68569]: DEBUG nova.scheduler.client.report [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.341829] env[68569]: DEBUG nova.network.neutron [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updating instance_info_cache with network_info: [{"id": "f467ee4f-f180-4173-a745-c02a31527645", "address": "fa:16:3e:a9:29:c7", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf467ee4f-f1", "ovs_interfaceid": "f467ee4f-f180-4173-a745-c02a31527645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.399711] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Getting lease state for 
https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee2684-1183-b97e-b911-68900f530984/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 680.400993] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bf2bb3-9c45-46e1-9f37-a7644464a225 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.410067] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee2684-1183-b97e-b911-68900f530984/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 680.410233] env[68569]: ERROR oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee2684-1183-b97e-b911-68900f530984/disk-0.vmdk due to incomplete transfer. [ 680.410441] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a73b1785-8722-4997-a914-c69853dfcec3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.420372] env[68569]: DEBUG oslo_vmware.rw_handles [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee2684-1183-b97e-b911-68900f530984/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 680.420717] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Uploaded image e3f0a360-85ab-43a5-be34-ec8eac9fa1ca to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 680.423115] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 680.423775] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c1418205-e2e6-4caf-8653-52f424e6cd38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.432748] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 680.432748] env[68569]: value = "task-3166666" [ 680.432748] env[68569]: _type = "Task" [ 680.432748] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.447470] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166666, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.563104] env[68569]: INFO nova.compute.manager [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Took 41.59 seconds to build instance. [ 680.585467] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 67%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.667384] env[68569]: DEBUG nova.compute.manager [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Received event network-changed-f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 680.667576] env[68569]: DEBUG nova.compute.manager [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Refreshing instance network info cache due to event network-changed-f467ee4f-f180-4173-a745-c02a31527645. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 680.671030] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] Acquiring lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.832589] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.832589] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 680.834926] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.657s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.836878] env[68569]: INFO nova.compute.claims [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.845814] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.852012] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Instance network_info: |[{"id": "f467ee4f-f180-4173-a745-c02a31527645", "address": "fa:16:3e:a9:29:c7", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf467ee4f-f1", "ovs_interfaceid": "f467ee4f-f180-4173-a745-c02a31527645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 680.852012] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] Acquired lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.852315] env[68569]: DEBUG nova.network.neutron [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Refreshing network info cache for port f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.852315] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 
tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:29:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4c5eb94-841c-4713-985a-8fc4117fbaf1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f467ee4f-f180-4173-a745-c02a31527645', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.864212] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 680.866904] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.867549] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-842f6e3b-4062-4b3d-865e-bdf19fb86978 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.893642] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.893642] env[68569]: value = "task-3166667" [ 680.893642] env[68569]: _type = "Task" [ 680.893642] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.902802] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166667, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.945959] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166666, 'name': Destroy_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.065432] env[68569]: DEBUG oslo_concurrency.lockutils [None req-02797725-6649-4bfe-9250-9ff287ca061e tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.940s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.086036] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 81%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.269856] env[68569]: DEBUG nova.network.neutron [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updated VIF entry in instance network info cache for port f467ee4f-f180-4173-a745-c02a31527645. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 681.270266] env[68569]: DEBUG nova.network.neutron [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updating instance_info_cache with network_info: [{"id": "f467ee4f-f180-4173-a745-c02a31527645", "address": "fa:16:3e:a9:29:c7", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf467ee4f-f1", "ovs_interfaceid": "f467ee4f-f180-4173-a745-c02a31527645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.350558] env[68569]: DEBUG nova.compute.utils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 681.352427] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.352633] env[68569]: DEBUG nova.network.neutron [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.409496] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166667, 'name': CreateVM_Task, 'duration_secs': 0.50173} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.414300] env[68569]: DEBUG nova.policy [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b76740ad96b847ea900f3f8a7334dc57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1f431a9f7b741219e2edc8fccd9276b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.414300] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.414300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.414300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.414595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 681.415303] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-640c4afe-5765-4678-9fcc-8ffbe1f8afce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.421579] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 681.421579] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5256071c-adfd-6f25-0aa7-27beb3022af2" [ 681.421579] env[68569]: _type = "Task" [ 681.421579] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.434678] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5256071c-adfd-6f25-0aa7-27beb3022af2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.444885] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166666, 'name': Destroy_Task, 'duration_secs': 0.767264} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.445401] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Destroyed the VM [ 681.445721] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 681.446108] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b067a27e-af39-498f-be1f-8255ac81b5e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.454210] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 681.454210] env[68569]: value = "task-3166668" [ 681.454210] env[68569]: _type = "Task" [ 681.454210] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.467739] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166668, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.568955] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 681.585751] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 95%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.773772] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fc6cdec-d0f2-447d-9e8b-271e9810d8f1 req-78e6b73b-d122-4af0-856a-cad69e9dd07e service nova] Releasing lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.805627] env[68569]: DEBUG nova.network.neutron [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Successfully created port: 67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.858603] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 681.934736] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5256071c-adfd-6f25-0aa7-27beb3022af2, 'name': SearchDatastore_Task, 'duration_secs': 0.01803} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.935011] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.935310] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.935611] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.935831] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.936097] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 
tempest-SecurityGroupsTestJSON-456876085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.936431] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2007349b-1efc-4935-bb3b-a330ed5c8b58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.949911] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.949911] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.949911] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c934ba4f-f3a9-46d6-be26-27da1dee4426 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.959728] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 681.959728] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eeb21a-1788-fdc0-9318-d2f5437d0b7e" [ 681.959728] env[68569]: _type = "Task" [ 681.959728] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.969546] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166668, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.976423] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eeb21a-1788-fdc0-9318-d2f5437d0b7e, 'name': SearchDatastore_Task, 'duration_secs': 0.013295} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.977240] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0f9215e-0948-43ff-846c-795c7e9b0523 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.984834] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 681.984834] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520f31ec-2757-2d56-e0f1-9d686422151c" [ 681.984834] env[68569]: _type = "Task" [ 681.984834] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.994711] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520f31ec-2757-2d56-e0f1-9d686422151c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.089876] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task} progress is 98%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.096537] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.147379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "a29854f9-0096-4b01-9350-bfddee84e2c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.147654] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.149524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "a29854f9-0096-4b01-9350-bfddee84e2c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.149524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.149524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.156947] env[68569]: INFO nova.compute.manager [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Terminating instance [ 682.473193] env[68569]: DEBUG oslo_vmware.api [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166668, 'name': RemoveSnapshot_Task, 'duration_secs': 0.562217} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.473193] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 682.473193] env[68569]: INFO nova.compute.manager [None req-6b6656b4-0536-4cc0-af0b-d198ed7d2b03 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Took 13.93 seconds to snapshot the instance on the hypervisor. [ 682.500458] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520f31ec-2757-2d56-e0f1-9d686422151c, 'name': SearchDatastore_Task, 'duration_secs': 0.018788} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.500458] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.500458] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] bda7e09b-848b-4d5d-a49d-6e0639f22f99/bda7e09b-848b-4d5d-a49d-6e0639f22f99.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.500458] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae723c87-c375-4ad4-ada6-10170a555a03 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.513047] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 682.513047] env[68569]: value = "task-3166669" [ 682.513047] env[68569]: _type = "Task" [ 682.513047] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.523369] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.588188] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166665, 'name': RelocateVM_Task, 'duration_secs': 3.248271} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.588532] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 682.588532] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633474', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'name': 'volume-b88d036e-c8ce-4222-807f-25716446b927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2cde3729-1be6-42c5-891f-42a7a8bff267', 'attached_at': '', 'detached_at': '', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'serial': 'b88d036e-c8ce-4222-807f-25716446b927'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 682.589964] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7010e605-8e03-4d44-8578-870636ee3dc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.607534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1626a35-8140-43bc-a45c-9b5e10aad449 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.610616] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a70564-4614-4ef4-80d5-b2c3a2936b1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.627488] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588fb1a7-014e-4c5d-9a9e-2d6bc24062ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.639443] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Reconfiguring VM instance instance-0000001b to attach disk 
[datastore2] volume-b88d036e-c8ce-4222-807f-25716446b927/volume-b88d036e-c8ce-4222-807f-25716446b927.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.639773] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc64871c-13e1-476e-968a-7346fbac517b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.684638] env[68569]: DEBUG nova.compute.manager [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.684881] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.687750] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4c10b9-1bed-4988-a94f-322fd91e4c61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.691466] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d99253d-251a-4be0-a5f7-0a085e8f457c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.694071] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 682.694071] env[68569]: value = "task-3166670" [ 682.694071] env[68569]: _type = "Task" [ 682.694071] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.703739] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56fb2d0-c42e-4e21-b025-90e62b6231f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.708834] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.712371] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32ae5144-8e2b-4b52-b3dc-edabac93ebb9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.714072] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166670, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.729146] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.729863] env[68569]: DEBUG oslo_vmware.api [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 682.729863] env[68569]: value = "task-3166671" [ 682.729863] env[68569]: _type = "Task" [ 682.729863] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.739508] env[68569]: DEBUG oslo_vmware.api [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.867954] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.899099] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.899294] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.899494] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.899635] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 
tempest-ImagesOneServerTestJSON-1338934721-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.899777] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.899918] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.900380] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.900585] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.900759] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.900925] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.901115] env[68569]: DEBUG nova.virt.hardware [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.902445] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4db7cb-adc2-42dd-8117-e5d3df4acf43 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.913755] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15195b02-6822-4313-900b-81eefcbfe73a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.024931] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509178} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.025211] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] bda7e09b-848b-4d5d-a49d-6e0639f22f99/bda7e09b-848b-4d5d-a49d-6e0639f22f99.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.025472] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.026500] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b8cb869-06d5-4da7-9aae-4f23c82fc9c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.035717] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 683.035717] env[68569]: value = "task-3166672" [ 683.035717] env[68569]: _type = "Task" [ 683.035717] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.048037] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166672, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.206970] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166670, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.233371] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.246911] env[68569]: DEBUG oslo_vmware.api [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166671, 'name': PowerOffVM_Task, 'duration_secs': 0.385042} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.247242] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.247429] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.249173] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28b51d7c-7e09-4e45-8aeb-73985eea1010 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.319069] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.319353] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.319562] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Deleting the datastore file [datastore2] a29854f9-0096-4b01-9350-bfddee84e2c2 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.320518] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fc22cc9-b348-4db5-a050-9753a51c98a3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.331456] env[68569]: DEBUG oslo_vmware.api [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for the task: (returnval){ [ 683.331456] env[68569]: value = "task-3166674" [ 683.331456] env[68569]: _type = "Task" [ 683.331456] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.343200] env[68569]: DEBUG oslo_vmware.api [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166674, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.547588] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075174} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.547888] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.548818] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8c9dbe-1b24-4cf5-9d0a-a2197242a892 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.580048] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] bda7e09b-848b-4d5d-a49d-6e0639f22f99/bda7e09b-848b-4d5d-a49d-6e0639f22f99.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.580048] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0d953ae-ea3b-4e07-bfc4-6909d4f66bf6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.604091] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 683.604091] env[68569]: value = "task-3166675" [ 683.604091] env[68569]: _type = "Task" [ 683.604091] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.619439] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166675, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.707110] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166670, 'name': ReconfigVM_Task, 'duration_secs': 0.689813} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.707384] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Reconfigured VM instance instance-0000001b to attach disk [datastore2] volume-b88d036e-c8ce-4222-807f-25716446b927/volume-b88d036e-c8ce-4222-807f-25716446b927.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.713149] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeac15aa-4992-4120-a3ba-6ea6fb12ce2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.733162] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 683.733162] env[68569]: value = "task-3166676" [ 683.733162] env[68569]: _type = "Task" [ 683.733162] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.744237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.909s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.744873] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 683.755472] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166676, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.755472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.358s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.755472] env[68569]: INFO nova.compute.claims [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.755472] env[68569]: DEBUG nova.compute.manager [req-6b07bd67-734c-4045-a606-b8d55106eb45 req-f7b5932e-fac0-4fcd-97d6-905bdae24cff service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Received event network-vif-plugged-67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 683.755472] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b07bd67-734c-4045-a606-b8d55106eb45 req-f7b5932e-fac0-4fcd-97d6-905bdae24cff service nova] Acquiring lock "7c887df0-4358-46c5-9682-0d4122e96d10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.755747] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b07bd67-734c-4045-a606-b8d55106eb45 req-f7b5932e-fac0-4fcd-97d6-905bdae24cff service nova] Lock "7c887df0-4358-46c5-9682-0d4122e96d10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.755747] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b07bd67-734c-4045-a606-b8d55106eb45 req-f7b5932e-fac0-4fcd-97d6-905bdae24cff service nova] Lock "7c887df0-4358-46c5-9682-0d4122e96d10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.755747] env[68569]: DEBUG nova.compute.manager [req-6b07bd67-734c-4045-a606-b8d55106eb45 req-f7b5932e-fac0-4fcd-97d6-905bdae24cff service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] No waiting events found dispatching network-vif-plugged-67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 683.755948] env[68569]: WARNING nova.compute.manager [req-6b07bd67-734c-4045-a606-b8d55106eb45 req-f7b5932e-fac0-4fcd-97d6-905bdae24cff service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Received unexpected event network-vif-plugged-67421e24-0bc2-4e26-a6ac-551a8eb20275 for instance with vm_state building and task_state spawning. 
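The repeated "Waiting for the task: (returnval){ value = task-NNN }" / "progress is N%" / "completed successfully" sequences above all come from oslo.vmware's task helpers (wait_for_task at api.py:397, _poll_task at api.py:434/444). A minimal sketch of how a caller drives that loop, not Nova's actual code: the endpoint, credentials and the vm_ref argument below are placeholders, and only the oslo.vmware calls themselves are real API.

from oslo_vmware import api as vmware_api


def power_on(session: vmware_api.VMwareAPISession, vm_ref):
    # vm_ref is assumed to be an already-resolved VirtualMachine managed
    # object reference; obtaining it is out of scope for this sketch.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # Blocks while polling the task, emitting progress lines like the
    # "_poll_task ... progress is N%" entries above, then returns the task
    # info on success or raises on error.
    return session.wait_for_task(task)


def make_session():
    # Placeholder endpoint and credentials, not values from this deployment.
    return vmware_api.VMwareAPISession(
        host="vcenter.example.org",
        server_username="administrator@vsphere.local",
        server_password="secret",
        api_retry_count=3,
        task_poll_interval=0.5,
    )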
[ 683.757681] env[68569]: DEBUG nova.network.neutron [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Successfully updated port: 67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.841672] env[68569]: DEBUG oslo_vmware.api [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Task: {'id': task-3166674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.486926} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.841949] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.842150] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.842325] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.842495] env[68569]: INFO nova.compute.manager [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 683.842740] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.843279] env[68569]: DEBUG nova.compute.manager [-] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.843279] env[68569]: DEBUG nova.network.neutron [-] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.117980] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166675, 'name': ReconfigVM_Task, 'duration_secs': 0.289188} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.118652] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Reconfigured VM instance instance-0000001c to attach disk [datastore2] bda7e09b-848b-4d5d-a49d-6e0639f22f99/bda7e09b-848b-4d5d-a49d-6e0639f22f99.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.120501] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c054984-a269-4c97-b3e5-f6ec9979125e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.129347] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 684.129347] env[68569]: value = "task-3166677" [ 684.129347] env[68569]: _type = "Task" [ 684.129347] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.145312] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166677, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.254868] env[68569]: DEBUG nova.compute.utils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 684.256550] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166676, 'name': ReconfigVM_Task, 'duration_secs': 0.18244} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.256889] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.257122] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.262237] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633474', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'name': 'volume-b88d036e-c8ce-4222-807f-25716446b927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2cde3729-1be6-42c5-891f-42a7a8bff267', 'attached_at': '', 'detached_at': '', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'serial': 'b88d036e-c8ce-4222-807f-25716446b927'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 684.263009] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "refresh_cache-7c887df0-4358-46c5-9682-0d4122e96d10" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.263171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquired lock "refresh_cache-7c887df0-4358-46c5-9682-0d4122e96d10" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.263333] env[68569]: DEBUG nova.network.neutron [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.264832] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d95d6800-b33f-438f-ac1d-f1f893c2d049 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.278036] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 684.278036] env[68569]: value = "task-3166678" [ 684.278036] env[68569]: _type = "Task" [ 684.278036] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.287363] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166678, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.336310] env[68569]: DEBUG nova.policy [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4a384e5a314a83b75928dea39bf78c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10edb645f90c45edbd3aa43bfa24b86e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.398535] env[68569]: DEBUG nova.compute.manager [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 684.399999] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62791aa1-bada-4905-9df9-7f774b9f218f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.642548] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166677, 'name': Rename_Task, 'duration_secs': 0.151532} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.642943] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.643342] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdc43b59-90e1-40ca-8a1c-def76bb59601 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.651550] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 684.651550] env[68569]: value = "task-3166679" [ 684.651550] env[68569]: _type = "Task" [ 684.651550] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.661026] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166679, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.774382] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 684.781218] env[68569]: DEBUG nova.network.neutron [-] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.789420] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Successfully created port: 925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.795156] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166678, 'name': Rename_Task, 'duration_secs': 0.134042} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.795563] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.795935] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2791098-5c75-490e-8ce5-470b7fa71a94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.804529] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 684.804529] env[68569]: value = "task-3166680" [ 684.804529] env[68569]: _type = "Task" [ 684.804529] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.814403] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166680, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.831282] env[68569]: DEBUG nova.network.neutron [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.912846] env[68569]: INFO nova.compute.manager [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] instance snapshotting [ 684.916952] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52cb2c5-1234-4ad1-b325-7d7b139179b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.939369] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13758e83-2664-4d6b-bf53-794763277585 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.144701] env[68569]: DEBUG nova.network.neutron [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Updating instance_info_cache with network_info: [{"id": "67421e24-0bc2-4e26-a6ac-551a8eb20275", "address": "fa:16:3e:46:2f:07", "network": {"id": "b079a78f-1e04-404c-a8a5-b1d838377108", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-308487106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1f431a9f7b741219e2edc8fccd9276b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67421e24-0b", "ovs_interfaceid": "67421e24-0bc2-4e26-a6ac-551a8eb20275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.167305] env[68569]: DEBUG oslo_vmware.api [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166679, 'name': PowerOnVM_Task, 'duration_secs': 0.497796} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.171694] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.171993] env[68569]: INFO nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Took 7.40 seconds to spawn the instance on the hypervisor. 
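The instance_info_cache update just above carries the complete VIF description for port 67421e24-0bc2-4e26-a6ac-551a8eb20275. A throwaway sketch in plain Python, with the values copied from that log entry, showing where the device name, MAC and fixed IP sit inside that structure:

# Values below are copied from the network_info logged above; the parsing is
# illustrative only (Nova itself wraps this data in nova.network.model objects).
network_info = [{
    "id": "67421e24-0bc2-4e26-a6ac-551a8eb20275",
    "address": "fa:16:3e:46:2f:07",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tap67421e24-0b",
    "ovs_interfaceid": "67421e24-0bc2-4e26-a6ac-551a8eb20275",
    "active": True,
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed_ips)
# -> tap67421e24-0b fa:16:3e:46:2f:07 ['192.168.128.11']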
[ 685.172258] env[68569]: DEBUG nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.173595] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea82ee6-c8c9-43d7-b5aa-48b39f617689 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.289767] env[68569]: INFO nova.compute.manager [-] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Took 1.45 seconds to deallocate network for instance. [ 685.322549] env[68569]: DEBUG oslo_vmware.api [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166680, 'name': PowerOnVM_Task, 'duration_secs': 0.476435} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.322825] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.325898] env[68569]: INFO nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Took 8.59 seconds to spawn the instance on the hypervisor. 
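For the inventory reported earlier for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, placement compares new claims against an effective capacity of (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation. A short sketch redoing that arithmetic with the numbers from the log (illustrative only; this is placement's standard capacity check, not code from this deployment):

# Inventory values as logged for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 129},
}

for rc, inv in inventory.items():
    # Effective schedulable capacity = (total - reserved) * allocation_ratio.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable={capacity:g}, max per allocation={inv['max_unit']}")
# VCPU: schedulable=192, max per allocation=16
# MEMORY_MB: schedulable=196078, max per allocation=65530
# DISK_GB: schedulable=400, max per allocation=129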
[ 685.326210] env[68569]: DEBUG nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.329862] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b5b53e-a7ef-480f-abdd-36d023523aac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.454676] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff3b7db-5243-4727-b513-950c38cb3212 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.464067] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b34a5a-f028-4a73-9dd7-7e86eee13f96 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.467775] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 685.468071] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9e1500ff-fef4-43c0-8e18-e252c67a1496 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.500336] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f725b2-844e-4623-a157-c67b01a879d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.502641] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 685.502641] env[68569]: value = "task-3166681" [ 685.502641] env[68569]: _type = "Task" [ 685.502641] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.511443] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f320956f-e975-4213-8072-44c63418f9ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.519293] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166681, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.530802] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.648138] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Releasing lock "refresh_cache-7c887df0-4358-46c5-9682-0d4122e96d10" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.648842] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Instance network_info: |[{"id": "67421e24-0bc2-4e26-a6ac-551a8eb20275", "address": "fa:16:3e:46:2f:07", "network": {"id": "b079a78f-1e04-404c-a8a5-b1d838377108", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-308487106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1f431a9f7b741219e2edc8fccd9276b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67421e24-0b", "ovs_interfaceid": "67421e24-0bc2-4e26-a6ac-551a8eb20275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.648934] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:2f:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67421e24-0bc2-4e26-a6ac-551a8eb20275', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.656287] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e 
tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Creating folder: Project (e1f431a9f7b741219e2edc8fccd9276b). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.656602] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-885c0080-743f-4872-970c-999f92244599 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.672304] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Created folder: Project (e1f431a9f7b741219e2edc8fccd9276b) in parent group-v633430. [ 685.672551] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Creating folder: Instances. Parent ref: group-v633518. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.672815] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec33cec1-6598-4192-9c91-2c78dbec0281 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.687022] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Created folder: Instances in parent group-v633518. [ 685.687022] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.687022] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.687022] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c14d3df-b7b8-4a48-8158-43f6b0146fdf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.710484] env[68569]: INFO nova.compute.manager [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Took 45.47 seconds to build instance. [ 685.713097] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.713097] env[68569]: value = "task-3166684" [ 685.713097] env[68569]: _type = "Task" [ 685.713097] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.723468] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166684, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.790862] env[68569]: DEBUG nova.compute.manager [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Received event network-changed-67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 685.790946] env[68569]: DEBUG nova.compute.manager [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Refreshing instance network info cache due to event network-changed-67421e24-0bc2-4e26-a6ac-551a8eb20275. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 685.791227] env[68569]: DEBUG oslo_concurrency.lockutils [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] Acquiring lock "refresh_cache-7c887df0-4358-46c5-9682-0d4122e96d10" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.791468] env[68569]: DEBUG oslo_concurrency.lockutils [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] Acquired lock "refresh_cache-7c887df0-4358-46c5-9682-0d4122e96d10" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.791733] env[68569]: DEBUG nova.network.neutron [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Refreshing network info cache for port 67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.794518] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 685.802264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.826977] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 685.827988] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.827988] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 685.827988] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.827988] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 685.828190] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 685.828425] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 685.828607] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 685.828810] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 685.828992] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 685.829221] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 685.830908] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627d6101-b1a9-44ec-9dc8-a136d3c2973d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.839538] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072b5644-d003-4b14-b2aa-66285dfbb5e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.859312] env[68569]: INFO nova.compute.manager [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Took 46.11 seconds to build instance. [ 686.020091] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166681, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.064163] env[68569]: ERROR nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [req-d391aeb5-8de8-49b5-a47f-496dc225291d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d391aeb5-8de8-49b5-a47f-496dc225291d"}]} [ 686.085108] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 686.100772] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 686.101041] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 686.114235] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 686.132821] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 686.223788] env[68569]: DEBUG oslo_concurrency.lockutils [None req-026f1863-7e4f-403a-abae-bc112dc0c092 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.795s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.235487] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166684, 'name': CreateVM_Task, 'duration_secs': 0.368653} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.236397] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.237511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.237511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.238384] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.238384] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd4df3db-f8c3-438c-b31a-715203160abd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.246859] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 686.246859] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524cc125-d9ad-543b-7a5d-066012987edd" [ 686.246859] env[68569]: _type = "Task" [ 686.246859] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.259493] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524cc125-d9ad-543b-7a5d-066012987edd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.362086] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1f865917-8c1f-4ea8-ac29-3cb76dd28efc tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.963s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.518998] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166681, 'name': CreateSnapshot_Task, 'duration_secs': 0.600175} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.519546] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 686.520014] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f050c22-4e58-4173-83e7-03999c60ed39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.669366] env[68569]: DEBUG nova.network.neutron [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Updated VIF entry in instance network info cache for port 67421e24-0bc2-4e26-a6ac-551a8eb20275. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.673555] env[68569]: DEBUG nova.network.neutron [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Updating instance_info_cache with network_info: [{"id": "67421e24-0bc2-4e26-a6ac-551a8eb20275", "address": "fa:16:3e:46:2f:07", "network": {"id": "b079a78f-1e04-404c-a8a5-b1d838377108", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-308487106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1f431a9f7b741219e2edc8fccd9276b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67421e24-0b", "ovs_interfaceid": "67421e24-0bc2-4e26-a6ac-551a8eb20275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.708204] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace3d8bc-9426-4559-b16b-530fbfcb794f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.717351] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da96044-beb3-4581-a570-c8478b688939 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.751047] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 686.758371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec6236e-9c23-4632-a1c4-89f5468df993 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.770534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384bcfe9-a573-4989-86c7-b25a8bb07da5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.774436] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524cc125-d9ad-543b-7a5d-066012987edd, 'name': SearchDatastore_Task, 'duration_secs': 0.012897} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.774737] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.774972] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.775227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.775370] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.775547] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.777163] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f2ceace-1140-4ea1-92bb-16fc45eebe77 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.793022] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 686.793022] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Successfully updated port: 925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.808520] 
env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.808716] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.810417] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-923394fa-313a-49bc-ac27-7cd70b11882e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.817242] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 686.817242] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5287ce7a-0546-3dbb-b665-8c2d15870699" [ 686.817242] env[68569]: _type = "Task" [ 686.817242] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.826943] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5287ce7a-0546-3dbb-b665-8c2d15870699, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.864629] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 687.019437] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.019710] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.050337] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 687.050943] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cc702394-38d7-4c33-8083-9cba08ffad12 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.061832] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 687.061832] env[68569]: value = "task-3166685" [ 687.061832] env[68569]: _type = "Task" [ 687.061832] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.068277] env[68569]: DEBUG nova.compute.manager [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Received event network-changed-f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 687.068332] env[68569]: DEBUG nova.compute.manager [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Refreshing instance network info cache due to event network-changed-f467ee4f-f180-4173-a745-c02a31527645. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 687.068513] env[68569]: DEBUG oslo_concurrency.lockutils [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] Acquiring lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.068648] env[68569]: DEBUG oslo_concurrency.lockutils [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] Acquired lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.069091] env[68569]: DEBUG nova.network.neutron [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Refreshing network info cache for port f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.073974] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166685, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.175870] env[68569]: DEBUG oslo_concurrency.lockutils [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] Releasing lock "refresh_cache-7c887df0-4358-46c5-9682-0d4122e96d10" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.176317] env[68569]: DEBUG nova.compute.manager [req-b68c608b-abbf-4997-92af-f0692a936b17 req-b3e37917-8620-4779-b9b3-6d0a0c48e4cd service nova] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Received event network-vif-deleted-e3224bc8-bde5-46e7-aad2-c141a0c0b7b7 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 687.274866] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.299392] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "refresh_cache-ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.299540] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "refresh_cache-ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.300487] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 
ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.320348] env[68569]: ERROR nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [req-2d0726c2-6217-4315-9ba0-79d718f9ca51] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2d0726c2-6217-4315-9ba0-79d718f9ca51"}]} [ 687.333526] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5287ce7a-0546-3dbb-b665-8c2d15870699, 'name': SearchDatastore_Task, 'duration_secs': 0.02356} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.334433] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-697111bb-bee2-4c20-bbf3-ba17b4aa0e68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.339794] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 687.343986] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 687.343986] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5218f147-4656-4cb4-e529-8a77ea35ff62" [ 687.343986] env[68569]: _type = "Task" [ 687.343986] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.354069] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5218f147-4656-4cb4-e529-8a77ea35ff62, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.356072] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 687.356287] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 687.375316] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 687.386985] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.395384] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 687.578774] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166685, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.716549] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.716871] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.717196] env[68569]: INFO nova.compute.manager [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Rebooting instance [ 687.858050] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5218f147-4656-4cb4-e529-8a77ea35ff62, 'name': SearchDatastore_Task, 'duration_secs': 0.013065} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.860954] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.861181] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 7c887df0-4358-46c5-9682-0d4122e96d10/7c887df0-4358-46c5-9682-0d4122e96d10.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.862330] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-227199dc-ec04-4987-b97e-be27652aba85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.868209] env[68569]: DEBUG nova.compute.manager [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Received event network-vif-plugged-925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 687.868380] env[68569]: DEBUG oslo_concurrency.lockutils [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] Acquiring lock 
"ec64b2fd-2409-4af1-8f51-cc0ccbba14f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.868585] env[68569]: DEBUG oslo_concurrency.lockutils [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.868752] env[68569]: DEBUG oslo_concurrency.lockutils [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.868933] env[68569]: DEBUG nova.compute.manager [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] No waiting events found dispatching network-vif-plugged-925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.869117] env[68569]: WARNING nova.compute.manager [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Received unexpected event network-vif-plugged-925021fa-a59a-4390-b3c2-3290381e5664 for instance with vm_state building and task_state spawning. [ 687.869277] env[68569]: DEBUG nova.compute.manager [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Received event network-changed-925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 687.869426] env[68569]: DEBUG nova.compute.manager [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Refreshing instance network info cache due to event network-changed-925021fa-a59a-4390-b3c2-3290381e5664. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 687.869762] env[68569]: DEBUG oslo_concurrency.lockutils [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] Acquiring lock "refresh_cache-ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.870599] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.880212] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 687.880212] env[68569]: value = "task-3166686" [ 687.880212] env[68569]: _type = "Task" [ 687.880212] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.891641] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.982177] env[68569]: DEBUG nova.network.neutron [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updated VIF entry in instance network info cache for port f467ee4f-f180-4173-a745-c02a31527645. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 687.982581] env[68569]: DEBUG nova.network.neutron [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updating instance_info_cache with network_info: [{"id": "f467ee4f-f180-4173-a745-c02a31527645", "address": "fa:16:3e:a9:29:c7", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf467ee4f-f1", "ovs_interfaceid": "f467ee4f-f180-4173-a745-c02a31527645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.003884] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b44d501-3456-43c0-bef1-6b80ba899026 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.014913] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef812b6-6592-4f9c-91d8-0f974477c325 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.052133] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1048fae-69b9-46a3-af15-9139388e1215 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.061848] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d9ad21-9377-4d6c-9942-e1036a8ed32d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.085323] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.086686] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166685, 'name': CloneVM_Task} progress is 95%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.097983] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Updating instance_info_cache with network_info: [{"id": "925021fa-a59a-4390-b3c2-3290381e5664", "address": "fa:16:3e:a5:14:04", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925021fa-a5", "ovs_interfaceid": "925021fa-a59a-4390-b3c2-3290381e5664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.251487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.392922] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e 
tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166686, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.488600] env[68569]: DEBUG oslo_concurrency.lockutils [req-15bfaddf-4a48-4270-9ad8-0bb6c760b899 req-ff0a9b78-3b25-4dfb-856e-ad3e46e5d9b7 service nova] Releasing lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.489383] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquired lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.489383] env[68569]: DEBUG nova.network.neutron [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.575959] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166685, 'name': CloneVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.604055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "refresh_cache-ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.605083] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Instance network_info: |[{"id": "925021fa-a59a-4390-b3c2-3290381e5664", "address": "fa:16:3e:a5:14:04", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925021fa-a5", "ovs_interfaceid": "925021fa-a59a-4390-b3c2-3290381e5664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 688.605083] env[68569]: DEBUG oslo_concurrency.lockutils [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] Acquired lock "refresh_cache-ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.605353] env[68569]: DEBUG nova.network.neutron [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Refreshing network info cache for port 925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 688.607732] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:14:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '925021fa-a59a-4390-b3c2-3290381e5664', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.625723] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Creating folder: Project (10edb645f90c45edbd3aa43bfa24b86e). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.628389] env[68569]: DEBUG nova.scheduler.client.report [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 688.628738] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 61 to 62 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 688.629054] env[68569]: DEBUG nova.compute.provider_tree [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.634611] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-252ff6b0-5067-47ce-a801-149e32afe68e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.648749] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Created folder: Project (10edb645f90c45edbd3aa43bfa24b86e) in parent group-v633430. [ 688.649088] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Creating folder: Instances. Parent ref: group-v633523. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.649730] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5067dd37-889e-4b31-9dd9-31ba620c99f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.661737] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Created folder: Instances in parent group-v633523. [ 688.662088] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.662364] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 688.662964] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9685c92d-8a11-4532-9f17-ec32db40f642 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.696077] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.696077] env[68569]: value = "task-3166689" [ 688.696077] env[68569]: _type = "Task" [ 688.696077] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.707633] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166689, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.895389] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166686, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664126} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.895812] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 7c887df0-4358-46c5-9682-0d4122e96d10/7c887df0-4358-46c5-9682-0d4122e96d10.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.896094] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.896421] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50d433fc-18b5-4ec6-88f3-8f093c16ccd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.905458] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 688.905458] env[68569]: value = "task-3166690" [ 688.905458] env[68569]: _type = "Task" [ 688.905458] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.914503] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166690, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.077432] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166685, 'name': CloneVM_Task, 'duration_secs': 1.523303} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.077849] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Created linked-clone VM from snapshot [ 689.078653] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f98b28-beba-4f07-964d-9eaaac4eea14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.089411] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Uploading image 8aaa5fed-efbe-4437-b896-3e81de63e0fd {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 689.111964] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 689.111964] env[68569]: value = "vm-633522" [ 689.111964] env[68569]: _type = "VirtualMachine" [ 689.111964] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 689.112262] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-248a4680-9b32-46af-9cc8-13210a0f49c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.116100] env[68569]: DEBUG nova.compute.manager [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Received event network-changed-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 689.116302] env[68569]: DEBUG nova.compute.manager [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Refreshing instance network info cache due to event network-changed-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 689.116508] env[68569]: DEBUG oslo_concurrency.lockutils [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] Acquiring lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.116650] env[68569]: DEBUG oslo_concurrency.lockutils [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] Acquired lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.116804] env[68569]: DEBUG nova.network.neutron [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Refreshing network info cache for port b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.126258] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lease: (returnval){ [ 689.126258] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521daca5-4e02-b91f-9545-e736109ac1b9" [ 689.126258] env[68569]: _type = "HttpNfcLease" [ 689.126258] env[68569]: } obtained for exporting VM: (result){ [ 689.126258] env[68569]: value = "vm-633522" [ 689.126258] env[68569]: _type = "VirtualMachine" [ 689.126258] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 689.126258] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the lease: (returnval){ [ 689.126258] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521daca5-4e02-b91f-9545-e736109ac1b9" [ 689.126258] env[68569]: _type = "HttpNfcLease" [ 689.126258] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 689.138017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.385s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.138017] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 689.140300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.640s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.140676] env[68569]: DEBUG nova.objects.instance [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lazy-loading 'resources' on Instance uuid 40b768c1-9007-4f78-a90f-61b2ac64553f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.142293] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 689.142293] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521daca5-4e02-b91f-9545-e736109ac1b9" [ 689.142293] env[68569]: _type = "HttpNfcLease" [ 689.142293] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 689.208766] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166689, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.287337] env[68569]: DEBUG nova.network.neutron [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updating instance_info_cache with network_info: [{"id": "f467ee4f-f180-4173-a745-c02a31527645", "address": "fa:16:3e:a9:29:c7", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf467ee4f-f1", "ovs_interfaceid": "f467ee4f-f180-4173-a745-c02a31527645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.415793] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166690, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068205} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.416124] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.416964] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401a9370-480b-4e4e-b6ff-088ac58d3314 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.449228] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 7c887df0-4358-46c5-9682-0d4122e96d10/7c887df0-4358-46c5-9682-0d4122e96d10.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.449556] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14f14f34-767b-4d49-a7d7-f74ebd124e64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.471610] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 689.471610] env[68569]: value = "task-3166692" [ 689.471610] env[68569]: _type = "Task" [ 689.471610] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.486748] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166692, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.624922] env[68569]: DEBUG nova.network.neutron [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Updated VIF entry in instance network info cache for port 925021fa-a59a-4390-b3c2-3290381e5664. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 689.625324] env[68569]: DEBUG nova.network.neutron [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Updating instance_info_cache with network_info: [{"id": "925021fa-a59a-4390-b3c2-3290381e5664", "address": "fa:16:3e:a5:14:04", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925021fa-a5", "ovs_interfaceid": "925021fa-a59a-4390-b3c2-3290381e5664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.634269] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 689.634269] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521daca5-4e02-b91f-9545-e736109ac1b9" [ 689.634269] env[68569]: _type = "HttpNfcLease" [ 689.634269] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 689.634518] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 689.634518] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521daca5-4e02-b91f-9545-e736109ac1b9" [ 689.634518] env[68569]: _type = "HttpNfcLease" [ 689.634518] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 689.635244] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624a8c06-583a-423c-8ee4-4b2e5ca77085 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.644977] env[68569]: DEBUG nova.compute.utils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.648866] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528feea4-1932-f4e4-a199-5f68188a341f/disk-0.vmdk from lease info. 
{{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 689.649047] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528feea4-1932-f4e4-a199-5f68188a341f/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 689.650648] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.650821] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.711014] env[68569]: DEBUG nova.policy [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4a384e5a314a83b75928dea39bf78c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10edb645f90c45edbd3aa43bfa24b86e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.725283] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166689, 'name': CreateVM_Task, 'duration_secs': 0.840489} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.725457] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 689.726161] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.726336] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.726641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 689.726896] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ac595e1-4d2b-418f-8396-81ec9922c255 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.736089] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 689.736089] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f75595-5adc-3907-ccb8-e01fc1550109" [ 689.736089] env[68569]: _type = "Task" [ 689.736089] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.745746] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f75595-5adc-3907-ccb8-e01fc1550109, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.751792] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1e9a132e-f0c1-4779-8744-2f5fa2b57ff5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.790637] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Releasing lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.988059] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166692, 'name': ReconfigVM_Task, 'duration_secs': 0.276973} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.991028] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 7c887df0-4358-46c5-9682-0d4122e96d10/7c887df0-4358-46c5-9682-0d4122e96d10.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.991884] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5be0467a-35db-44f7-b437-7dd7368635ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.999775] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 689.999775] env[68569]: value = "task-3166693" [ 689.999775] env[68569]: _type = "Task" [ 689.999775] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.009379] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166693, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.046363] env[68569]: DEBUG nova.network.neutron [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updated VIF entry in instance network info cache for port b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.046704] env[68569]: DEBUG nova.network.neutron [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updating instance_info_cache with network_info: [{"id": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "address": "fa:16:3e:17:3e:41", "network": {"id": "c0d55a28-4420-4fa0-85dc-7e503881d235", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1758956017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "378a536d9ff14836aa7f971498835b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8051e6d-d6", "ovs_interfaceid": "b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.131514] env[68569]: DEBUG oslo_concurrency.lockutils [req-41cc354b-80be-420a-8bce-c31823fcca83 req-865d2920-3be4-450c-a0a3-f3ee527cab66 service nova] Releasing lock "refresh_cache-ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.155055] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 690.208917] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Successfully created port: 15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.248680] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f75595-5adc-3907-ccb8-e01fc1550109, 'name': SearchDatastore_Task, 'duration_secs': 0.01322} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.252689] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.253417] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.254025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.254025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.254274] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.254777] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ec5d059-8cb6-48fe-9f16-72d9522184b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.265271] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.265556] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.266814] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fbd81ac-2ee3-4a06-8af9-a5b3d80b3cf6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.274459] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 690.274459] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527932bd-bbcd-11a3-40c2-17823e4617de" [ 690.274459] env[68569]: _type = "Task" [ 690.274459] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.290568] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527932bd-bbcd-11a3-40c2-17823e4617de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.296526] env[68569]: DEBUG nova.compute.manager [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 690.297648] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac1b28f-b699-4226-87f4-4636c9975261 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.367989] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c949f89-088c-4a7d-ac68-e0b9f8d4b3be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.376840] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d809b73b-0bac-45c0-b45d-fdaa3178d647 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.410267] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb61897-d363-49ad-9064-63271b58b37d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.424290] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18664d0-92c2-44b8-b7d2-e8a44f9a587b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.442162] env[68569]: DEBUG nova.compute.provider_tree [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.510243] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e 
tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166693, 'name': Rename_Task, 'duration_secs': 0.196409} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.510889] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.510982] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3797468d-5f76-4394-89e7-9407a9dcc364 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.518630] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 690.518630] env[68569]: value = "task-3166694" [ 690.518630] env[68569]: _type = "Task" [ 690.518630] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.529912] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166694, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.549717] env[68569]: DEBUG oslo_concurrency.lockutils [req-5efeff85-5600-4238-ad6d-68f6059cd1fd req-13ceed4e-7ad9-41eb-802a-3fb18a81675e service nova] Releasing lock "refresh_cache-2cde3729-1be6-42c5-891f-42a7a8bff267" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.785437] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527932bd-bbcd-11a3-40c2-17823e4617de, 'name': SearchDatastore_Task, 'duration_secs': 0.014263} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.786253] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e03f5fad-9864-44b8-b6fe-a5c406c98a5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.792680] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 690.792680] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521575d0-af7a-6435-292d-62f9c8322464" [ 690.792680] env[68569]: _type = "Task" [ 690.792680] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.801138] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521575d0-af7a-6435-292d-62f9c8322464, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.945562] env[68569]: DEBUG nova.scheduler.client.report [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.029355] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166694, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.165824] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 691.191657] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 691.192165] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.193028] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.193670] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.193949] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.194241] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.194581] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.194795] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.195050] env[68569]: DEBUG nova.virt.hardware [None 
req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.195327] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.195517] env[68569]: DEBUG nova.virt.hardware [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.196644] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232a1558-9d28-4c36-8195-51a156077de2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.206887] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe25a9c6-0a70-433f-98ee-7de640a84478 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.303937] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521575d0-af7a-6435-292d-62f9c8322464, 'name': SearchDatastore_Task, 'duration_secs': 0.013235} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.304793] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.304793] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] ec64b2fd-2409-4af1-8f51-cc0ccbba14f2/ec64b2fd-2409-4af1-8f51-cc0ccbba14f2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.306036] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df275216-5b54-4e13-99af-4c5c3657e132 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.313649] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 691.313649] env[68569]: value = "task-3166695" [ 691.313649] env[68569]: _type = "Task" [ 691.313649] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.318651] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78805fd6-e554-49a3-b61d-ffdb43bbf62c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.327182] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.329662] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Doing hard reboot of VM {{(pid=68569) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 691.330380] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-a5a830ec-3bcb-431c-a30a-5c54012691dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.339526] env[68569]: DEBUG oslo_vmware.api [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 691.339526] env[68569]: value = "task-3166696" [ 691.339526] env[68569]: _type = "Task" [ 691.339526] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.350866] env[68569]: DEBUG oslo_vmware.api [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166696, 'name': ResetVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.452036] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.312s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.454867] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.388s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.456254] env[68569]: DEBUG nova.objects.instance [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 691.475166] env[68569]: INFO nova.scheduler.client.report [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleted allocations for instance 40b768c1-9007-4f78-a90f-61b2ac64553f [ 691.531144] env[68569]: DEBUG oslo_vmware.api [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166694, 'name': PowerOnVM_Task, 'duration_secs': 0.737508} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.531384] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.531663] env[68569]: INFO nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Took 8.66 seconds to spawn the instance on the hypervisor. 
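[editor's note] The spawn sequence recorded above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same wait_for_task pattern each time: the driver submits a vCenter task, then polls it until it reports success, which is why the log repeats "Waiting for the task ... to complete", "progress is N%", and finally "completed successfully" with a duration_secs value. The short Python sketch below is only an illustration of that polling loop as it appears in these log lines; the get_task_info callable and its return shape are assumptions for illustration, not the oslo.vmware implementation.

    import time

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        # Poll the task until it finishes, mirroring the repeated
        # "Task: {'id': ..., 'name': ...} progress is N%" entries above.
        start = time.monotonic()
        while True:
            info = get_task_info(task_id)  # assumed helper: returns {'state': ..., 'progress': ..., 'error': ...}
            if info['state'] == 'success':
                # The log reports duration_secs once the task completes.
                return time.monotonic() - start
            if info['state'] == 'error':
                raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
            time.sleep(poll_interval)

Each "progress is N%" line above corresponds to one iteration of such a loop (for example task-3166694 going from 0% to 88% to "completed successfully" with duration_secs 0.737508). [end editor's note]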
[ 691.532037] env[68569]: DEBUG nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.533183] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa620a6-d4eb-404e-b269-74bff7c03828 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.827581] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166695, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.852492] env[68569]: DEBUG oslo_vmware.api [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166696, 'name': ResetVM_Task, 'duration_secs': 0.104689} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.852784] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Did hard reboot of VM {{(pid=68569) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 691.852984] env[68569]: DEBUG nova.compute.manager [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.853814] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2a2bb2-5084-4121-a8ac-9f8f170c3997 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.876104] env[68569]: DEBUG nova.compute.manager [req-4a33c1b0-a4e3-4bdd-b265-62f962b27128 req-6370ee73-8ee2-473c-860d-2f4fa19452aa service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Received event network-vif-plugged-15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 691.876330] env[68569]: DEBUG oslo_concurrency.lockutils [req-4a33c1b0-a4e3-4bdd-b265-62f962b27128 req-6370ee73-8ee2-473c-860d-2f4fa19452aa service nova] Acquiring lock "78f486aa-80f4-4d43-bd00-cc6206517a72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.876547] env[68569]: DEBUG oslo_concurrency.lockutils [req-4a33c1b0-a4e3-4bdd-b265-62f962b27128 req-6370ee73-8ee2-473c-860d-2f4fa19452aa service nova] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.876836] env[68569]: DEBUG 
oslo_concurrency.lockutils [req-4a33c1b0-a4e3-4bdd-b265-62f962b27128 req-6370ee73-8ee2-473c-860d-2f4fa19452aa service nova] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.877094] env[68569]: DEBUG nova.compute.manager [req-4a33c1b0-a4e3-4bdd-b265-62f962b27128 req-6370ee73-8ee2-473c-860d-2f4fa19452aa service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] No waiting events found dispatching network-vif-plugged-15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 691.877335] env[68569]: WARNING nova.compute.manager [req-4a33c1b0-a4e3-4bdd-b265-62f962b27128 req-6370ee73-8ee2-473c-860d-2f4fa19452aa service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Received unexpected event network-vif-plugged-15a27b78-8a00-40e9-af61-368e8645b1ed for instance with vm_state building and task_state spawning. [ 691.984537] env[68569]: DEBUG oslo_concurrency.lockutils [None req-99e92a40-4087-4ed5-8c78-c0bdf7308787 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "40b768c1-9007-4f78-a90f-61b2ac64553f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.784s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.050705] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Successfully updated port: 15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.054802] env[68569]: INFO nova.compute.manager [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Took 47.82 seconds to build instance. [ 692.327350] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667538} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.327745] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] ec64b2fd-2409-4af1-8f51-cc0ccbba14f2/ec64b2fd-2409-4af1-8f51-cc0ccbba14f2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 692.328127] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 692.328566] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94622050-a30f-45c0-9b9f-a3a693ac3e27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.341572] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 692.341572] env[68569]: value = "task-3166697" [ 692.341572] env[68569]: _type = "Task" [ 692.341572] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.352329] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.370673] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fef80b15-d288-4779-8a80-31f5e3500336 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.654s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.469902] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72734ac1-77b5-46a6-92e8-a554cdecfbe6 tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.471602] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.237s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.473821] env[68569]: INFO nova.compute.claims [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.554287] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "refresh_cache-78f486aa-80f4-4d43-bd00-cc6206517a72" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.554379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "refresh_cache-78f486aa-80f4-4d43-bd00-cc6206517a72" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.554544] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.557518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4045c741-0ca2-4b93-a6cb-34c958b6117e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "7c887df0-4358-46c5-9682-0d4122e96d10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.310s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.842995] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring 
lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.843323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.843535] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "3ee3365b-0799-414b-b2a1-1d219bd9db96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.844218] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.844405] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.848381] env[68569]: INFO nova.compute.manager [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Terminating instance [ 692.861660] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111926} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.861816] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.862613] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27466cbd-c03f-4e87-8f87-b009b383db86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.888726] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] ec64b2fd-2409-4af1-8f51-cc0ccbba14f2/ec64b2fd-2409-4af1-8f51-cc0ccbba14f2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.889901] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7a00320-4a08-4726-8cda-7b528f6c4ef5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.912580] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 692.912580] env[68569]: value = "task-3166698" [ 692.912580] env[68569]: _type = "Task" [ 692.912580] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.922404] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166698, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.059607] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.110358] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.295048] env[68569]: DEBUG nova.network.neutron [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Updating instance_info_cache with network_info: [{"id": "15a27b78-8a00-40e9-af61-368e8645b1ed", "address": "fa:16:3e:64:5e:b9", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a27b78-8a", "ovs_interfaceid": "15a27b78-8a00-40e9-af61-368e8645b1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.356547] env[68569]: DEBUG nova.compute.manager [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 693.356826] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.357904] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371bb961-fe5f-4bd8-85ea-6fd8bc0c67cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.366979] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 693.367285] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-520d7118-4cf3-44c9-bdc2-4c4104de8d25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.374279] env[68569]: DEBUG oslo_vmware.api [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 693.374279] env[68569]: value = "task-3166699" [ 693.374279] env[68569]: _type = "Task" [ 693.374279] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.386179] env[68569]: DEBUG oslo_vmware.api [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166699, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.422628] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166698, 'name': ReconfigVM_Task, 'duration_secs': 0.451486} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.422919] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Reconfigured VM instance instance-0000001e to attach disk [datastore2] ec64b2fd-2409-4af1-8f51-cc0ccbba14f2/ec64b2fd-2409-4af1-8f51-cc0ccbba14f2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.423607] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49e0b30a-acfa-4e73-8f2d-1fa7b9e62a7b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.433167] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 693.433167] env[68569]: value = "task-3166700" [ 693.433167] env[68569]: _type = "Task" [ 693.433167] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.442697] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166700, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.546604] env[68569]: DEBUG nova.compute.manager [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Received event network-changed-f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 693.547505] env[68569]: DEBUG nova.compute.manager [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Refreshing instance network info cache due to event network-changed-f467ee4f-f180-4173-a745-c02a31527645. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 693.547505] env[68569]: DEBUG oslo_concurrency.lockutils [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] Acquiring lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.547505] env[68569]: DEBUG oslo_concurrency.lockutils [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] Acquired lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.547505] env[68569]: DEBUG nova.network.neutron [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Refreshing network info cache for port f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.583539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.651148] env[68569]: DEBUG nova.compute.manager [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 693.652980] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d678caa1-5582-45fa-a469-1972f657c92a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.797718] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "refresh_cache-78f486aa-80f4-4d43-bd00-cc6206517a72" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.798068] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Instance network_info: |[{"id": "15a27b78-8a00-40e9-af61-368e8645b1ed", "address": "fa:16:3e:64:5e:b9", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a27b78-8a", "ovs_interfaceid": "15a27b78-8a00-40e9-af61-368e8645b1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 693.798516] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:5e:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15a27b78-8a00-40e9-af61-368e8645b1ed', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.806157] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.809078] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.810420] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-214a0fda-78ba-4cb5-8f55-40132afa6a96 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.834538] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.834538] env[68569]: value = "task-3166701" [ 693.834538] env[68569]: _type = "Task" [ 693.834538] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.846161] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166701, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.884818] env[68569]: DEBUG oslo_vmware.api [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166699, 'name': PowerOffVM_Task, 'duration_secs': 0.265437} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.887765] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 693.887974] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 693.889045] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7437063-331d-4605-b2a5-7ee0a39ff174 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.948466] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166700, 'name': Rename_Task, 'duration_secs': 0.263812} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.949689] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.949689] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-502d485e-e06c-4c99-ab1f-0ee87ae14e38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.959379] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 693.959379] env[68569]: value = "task-3166703" [ 693.959379] env[68569]: _type = "Task" [ 693.959379] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.974730] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 693.975045] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 693.975275] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleting the datastore file [datastore1] 3ee3365b-0799-414b-b2a1-1d219bd9db96 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 693.975594] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c9a706b-9498-4573-b32b-4789ef2ef673 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.983620] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166703, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.991133] env[68569]: DEBUG oslo_vmware.api [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for the task: (returnval){ [ 693.991133] env[68569]: value = "task-3166704" [ 693.991133] env[68569]: _type = "Task" [ 693.991133] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.004610] env[68569]: DEBUG oslo_vmware.api [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.010954] env[68569]: DEBUG nova.compute.manager [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Received event network-changed-15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 694.011195] env[68569]: DEBUG nova.compute.manager [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Refreshing instance network info cache due to event network-changed-15a27b78-8a00-40e9-af61-368e8645b1ed. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 694.011606] env[68569]: DEBUG oslo_concurrency.lockutils [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] Acquiring lock "refresh_cache-78f486aa-80f4-4d43-bd00-cc6206517a72" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.011606] env[68569]: DEBUG oslo_concurrency.lockutils [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] Acquired lock "refresh_cache-78f486aa-80f4-4d43-bd00-cc6206517a72" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.011828] env[68569]: DEBUG nova.network.neutron [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Refreshing network info cache for port 15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.115047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f3b13a-94a2-4952-8d8c-d6b99cd75806 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.124832] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefe5281-6ec7-4483-a831-0321ea5790e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.161424] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155c1909-2c0c-4440-b363-020162a3018c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.167050] env[68569]: INFO nova.compute.manager [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] instance snapshotting [ 694.172195] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37c06d9-4fe0-4a68-9129-919cb38b3236 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.176766] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cced22b3-254f-47a7-bf15-f9e0bcfc4e99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.179706] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.179940] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.180284] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.180565] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.180740] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.194667] env[68569]: DEBUG nova.compute.provider_tree [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.212809] env[68569]: INFO nova.compute.manager [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Terminating instance [ 694.216400] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd67c28-5fdd-48de-a221-a69588d781db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.345228] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166701, 'name': CreateVM_Task, 'duration_secs': 0.43198} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.345498] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.346356] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.346473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.346760] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 694.347053] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6e65e40-fa4c-4cca-91a0-a9584132b8f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.353225] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 694.353225] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a5ef0f-7c70-7b67-357a-6d86ad35c65d" [ 694.353225] env[68569]: _type = "Task" [ 694.353225] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.361801] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a5ef0f-7c70-7b67-357a-6d86ad35c65d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.366107] env[68569]: DEBUG nova.network.neutron [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updated VIF entry in instance network info cache for port f467ee4f-f180-4173-a745-c02a31527645. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.366430] env[68569]: DEBUG nova.network.neutron [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updating instance_info_cache with network_info: [{"id": "f467ee4f-f180-4173-a745-c02a31527645", "address": "fa:16:3e:a9:29:c7", "network": {"id": "5f571618-9e86-414b-9df9-ef4f3e050fda", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-716941586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef27055c27a04f7e9199b9c02efa7fcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf467ee4f-f1", "ovs_interfaceid": "f467ee4f-f180-4173-a745-c02a31527645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.470662] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166703, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.502575] env[68569]: DEBUG oslo_vmware.api [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Task: {'id': task-3166704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140687} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.502888] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.503139] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 694.503372] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 694.503597] env[68569]: INFO nova.compute.manager [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Took 1.15 seconds to destroy the instance on the hypervisor. [ 694.503907] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.504152] env[68569]: DEBUG nova.compute.manager [-] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 694.504279] env[68569]: DEBUG nova.network.neutron [-] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.715786] env[68569]: DEBUG nova.scheduler.client.report [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.723674] env[68569]: DEBUG nova.compute.manager [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.723853] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.724960] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78077a98-6c37-4459-9db6-476cee2cd745 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.729874] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 694.730152] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7ee7ea79-7394-4960-9eb7-ac391d4c0f54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.734738] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.736290] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-561e74b1-e188-4168-bd66-fd810e840213 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.741812] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 694.741812] env[68569]: value = "task-3166705" [ 694.741812] env[68569]: _type = "Task" [ 694.741812] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.750360] env[68569]: DEBUG oslo_vmware.api [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 694.750360] env[68569]: value = "task-3166706" [ 694.750360] env[68569]: _type = "Task" [ 694.750360] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.760300] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166705, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.766026] env[68569]: DEBUG oslo_vmware.api [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166706, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.867346] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a5ef0f-7c70-7b67-357a-6d86ad35c65d, 'name': SearchDatastore_Task, 'duration_secs': 0.018689} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.867958] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.868271] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.868521] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.868669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.868868] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.869608] env[68569]: DEBUG oslo_concurrency.lockutils [req-6e17f0b3-481c-4f35-a66c-91c1191fd94c req-633847e4-5ee5-471f-82d8-1d6bbeea9beb service nova] Releasing lock "refresh_cache-bda7e09b-848b-4d5d-a49d-6e0639f22f99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.869969] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1768792e-d699-4f70-8455-d7c7f870eaf6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.880943] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
694.881173] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.882204] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6683d7d9-ab3f-4d99-a461-0202889f46e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.889578] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 694.889578] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c58d0a-b328-a2b2-ba84-cccdaeb8aacf" [ 694.889578] env[68569]: _type = "Task" [ 694.889578] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.901519] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c58d0a-b328-a2b2-ba84-cccdaeb8aacf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.910856] env[68569]: DEBUG nova.network.neutron [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Updated VIF entry in instance network info cache for port 15a27b78-8a00-40e9-af61-368e8645b1ed. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.911261] env[68569]: DEBUG nova.network.neutron [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Updating instance_info_cache with network_info: [{"id": "15a27b78-8a00-40e9-af61-368e8645b1ed", "address": "fa:16:3e:64:5e:b9", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a27b78-8a", "ovs_interfaceid": "15a27b78-8a00-40e9-af61-368e8645b1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.976164] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166703, 'name': PowerOnVM_Task, 'duration_secs': 0.8676} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.976164] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.976164] env[68569]: INFO nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Took 9.18 seconds to spawn the instance on the hypervisor. 
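Editor's note: the repeated "Task: {...} progress is N%" / "completed successfully" entries above are produced by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The snippet below is a minimal, self-contained sketch of that poll-until-terminal-state loop; FakeTask, polls_to_finish and poll_interval are illustrative stand-ins, not the library's real classes or defaults.

# Illustrative sketch only: a simplified poll-until-done loop in the spirit of
# the wait_for_task/_poll_task entries above. FakeTask stands in for a real
# vSphere task object; the real driver reads TaskInfo through the vSphere API.
import time


class FakeTask:
    """Pretends to be a long-running vSphere task that finishes after a few polls."""

    def __init__(self, name, polls_to_finish=3):
        self.name = name
        self.polls_to_finish = polls_to_finish
        self._polls = 0

    def poll(self):
        """Return (state, progress) the way a TaskInfo snapshot would."""
        self._polls += 1
        if self._polls >= self.polls_to_finish:
            return "success", 100
        return "running", int(100 * self._polls / self.polls_to_finish)


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, logging progress like the
    'Task: {...} progress is N%.' entries in this log."""
    while True:
        state, progress = task.poll()
        print(f"Task: {{'name': {task.name!r}}} progress is {progress}%.")
        if state == "success":
            print(f"Task {task.name!r} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.name!r} failed")
        time.sleep(poll_interval)


wait_for_task(FakeTask("CreateSnapshot_Task"))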
[ 694.976322] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.977139] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918f0caa-c4da-4852-895f-fe7529385353 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.221649] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.222282] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 695.224952] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.906s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.225942] env[68569]: DEBUG nova.objects.instance [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lazy-loading 'resources' on Instance uuid 9bb06e82-cc5c-4673-b1f6-aae87568aa9c {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 695.253754] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166705, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.264165] env[68569]: DEBUG oslo_vmware.api [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166706, 'name': PowerOffVM_Task, 'duration_secs': 0.261298} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.264307] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.264385] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.264643] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-308e1471-b5d8-4c52-8320-b17ae3f9711f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.349303] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.349572] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.350144] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Deleting the datastore file [datastore2] bda7e09b-848b-4d5d-a49d-6e0639f22f99 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.350144] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c95b735-afc5-4183-af6d-26f8142e12c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.357361] env[68569]: DEBUG oslo_vmware.api [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 695.357361] env[68569]: value = "task-3166708" [ 695.357361] env[68569]: _type = "Task" [ 695.357361] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.367214] env[68569]: DEBUG oslo_vmware.api [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166708, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.401760] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c58d0a-b328-a2b2-ba84-cccdaeb8aacf, 'name': SearchDatastore_Task, 'duration_secs': 0.010922} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.402622] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f336a13-8ba5-454d-a6ad-7590eb70bc6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.409189] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 695.409189] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5277040b-1067-3c84-76dd-85d49d551b07" [ 695.409189] env[68569]: _type = "Task" [ 695.409189] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.413699] env[68569]: DEBUG oslo_concurrency.lockutils [req-39737c26-1a33-497c-8de8-9569fe99bc0b req-877e8dc0-f123-4163-b975-1f8db1a2077b service nova] Releasing lock "refresh_cache-78f486aa-80f4-4d43-bd00-cc6206517a72" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.418666] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5277040b-1067-3c84-76dd-85d49d551b07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.421147] env[68569]: DEBUG nova.network.neutron [-] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.495199] env[68569]: INFO nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Took 47.35 seconds to build instance. [ 695.731039] env[68569]: DEBUG nova.compute.utils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 695.735033] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.735146] env[68569]: DEBUG nova.network.neutron [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.756719] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166705, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.788173] env[68569]: DEBUG nova.policy [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48f0153c75da4790905b1f734eb447e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb7d044e2a2e4568b5c8c922b17a81ce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.870317] env[68569]: DEBUG oslo_vmware.api [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176929} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.870317] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.870317] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.870317] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.870317] env[68569]: INFO nova.compute.manager [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Took 1.15 seconds to destroy the instance on the hypervisor. 
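Editor's note: the 'Acquiring lock ... / acquired ... :: waited Ns / "released" ... :: held Ns' entries (for example around "compute_resources" above) come from oslo.concurrency's lockutils wrapper. Below is a minimal sketch of that named-lock usage, assuming oslo.concurrency is installed; the wait/held timings printed here are my own bookkeeping added for illustration, not the library's internal logging.

# Minimal sketch of the named in-process lock pattern used by the resource
# tracker ("compute_resources"). Assumes oslo.concurrency is installed; the
# printed wait/held timings merely mimic the log lines above.
import time

from oslo_concurrency import lockutils


def update_usage():
    start = time.monotonic()
    with lockutils.lock("compute_resources"):    # blocks until the named lock is free
        waited = time.monotonic() - start
        print(f'Lock "compute_resources" acquired :: waited {waited:.3f}s')
        work_start = time.monotonic()
        time.sleep(0.1)                          # stand-in for the critical section
        held = time.monotonic() - work_start
    print(f'Lock "compute_resources" "released" :: held {held:.3f}s')


update_usage()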
[ 695.870523] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 695.870604] env[68569]: DEBUG nova.compute.manager [-] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.870740] env[68569]: DEBUG nova.network.neutron [-] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.927194] env[68569]: INFO nova.compute.manager [-] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Took 1.42 seconds to deallocate network for instance. [ 695.927601] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5277040b-1067-3c84-76dd-85d49d551b07, 'name': SearchDatastore_Task, 'duration_secs': 0.011603} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.929921] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.930525] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 78f486aa-80f4-4d43-bd00-cc6206517a72/78f486aa-80f4-4d43-bd00-cc6206517a72.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.936108] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2de4fcef-cd6f-4fb8-84ed-46897d487d82 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.946039] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 695.946039] env[68569]: value = "task-3166709" [ 695.946039] env[68569]: _type = "Task" [ 695.946039] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.960274] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166709, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.997290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.929s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.081251] env[68569]: DEBUG nova.compute.manager [req-a8d72d07-d299-4956-b175-60dda41d5417 req-5650b8ef-39ad-48ac-ab41-20fd8e8f2a08 service nova] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Received event network-vif-deleted-8713f6a0-21e6-4e38-b1ff-3fb0deaa5c23 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 696.236356] env[68569]: DEBUG nova.network.neutron [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Successfully created port: 1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.242037] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 696.268868] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166705, 'name': CreateSnapshot_Task, 'duration_secs': 1.19359} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.269346] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 696.270253] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bc1a2c-bedb-4b37-81c0-96c210a1403f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.442015] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.459312] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49218} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.462100] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 78f486aa-80f4-4d43-bd00-cc6206517a72/78f486aa-80f4-4d43-bd00-cc6206517a72.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 696.462334] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.463831] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e0faf29-0f18-4bbd-950b-b92a2b3ac220 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.470699] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 696.470699] env[68569]: value = "task-3166710" [ 696.470699] env[68569]: _type = "Task" [ 696.470699] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.484747] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166710, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.491017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065b3081-be52-49e5-a47b-d22c21178274 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.499671] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c4c39d-c311-4510-864e-614c34b37d47 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.503591] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.541020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea57593-2775-48bd-9e92-4dca49b6cad3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.551249] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5887faa-d343-47a4-87ed-6edc44b0d87d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.569948] env[68569]: DEBUG nova.compute.provider_tree [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.798645] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 696.799257] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d34b431d-5d85-4b8b-bfe7-80934300b4a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.809054] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 696.809054] env[68569]: value = "task-3166711" [ 696.809054] env[68569]: _type = "Task" [ 696.809054] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.818068] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166711, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.835487] env[68569]: DEBUG nova.network.neutron [-] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.982974] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078238} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.983300] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.984139] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b4af83-4814-43b5-b455-b53f5ee0b5a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.009374] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 78f486aa-80f4-4d43-bd00-cc6206517a72/78f486aa-80f4-4d43-bd00-cc6206517a72.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.009740] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79802398-0e81-4c43-9d82-ddbbf7b1ae2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.038537] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 697.038537] env[68569]: value = "task-3166712" [ 697.038537] env[68569]: _type = "Task" [ 697.038537] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.045269] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.049519] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166712, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.072814] env[68569]: DEBUG nova.scheduler.client.report [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.252924] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 697.290773] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:51:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8f751b17-ac71-4dd8-99d2-bcdbe944f057',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1705354478',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 697.291140] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.291368] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 697.291601] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.291789] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 697.291981] 
env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 697.292309] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 697.292532] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 697.292768] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 697.293307] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 697.293307] env[68569]: DEBUG nova.virt.hardware [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 697.294484] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80cbbea-815e-4295-8e9a-281cee7a52ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.306450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc7900f-a40c-436d-89ea-04a7209087a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.341233] env[68569]: INFO nova.compute.manager [-] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Took 1.47 seconds to deallocate network for instance. [ 697.341982] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166711, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.550025] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166712, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.579120] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.354s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.581520] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.100s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.581714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.581869] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 697.582183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.725s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.583723] env[68569]: INFO nova.compute.claims [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.586985] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3219a690-5035-4b7d-9953-b46ac7e5ed22 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.599719] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80c492f-f06e-4b63-aabc-6f12f4fdd922 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.608819] env[68569]: INFO nova.scheduler.client.report [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Deleted allocations for instance 9bb06e82-cc5c-4673-b1f6-aae87568aa9c [ 697.624944] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff537a7-d41c-4d12-93f0-eac9c8c32103 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.633271] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9f02a8fe-0a9a-4c30-ba1d-b458a20ad86c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.670528] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179486MB free_disk=128GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 697.670733] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.825810] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166711, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.853813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.985194] env[68569]: DEBUG nova.network.neutron [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Successfully updated port: 1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 698.048952] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166712, 'name': ReconfigVM_Task, 'duration_secs': 0.612748} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.049273] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 78f486aa-80f4-4d43-bd00-cc6206517a72/78f486aa-80f4-4d43-bd00-cc6206517a72.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.049919] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0432bf3-b182-4bb6-ac7b-31542fc4bda0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.058089] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 698.058089] env[68569]: value = "task-3166713" [ 698.058089] env[68569]: _type = "Task" [ 698.058089] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.067687] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166713, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.119462] env[68569]: DEBUG nova.compute.manager [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Received event network-vif-deleted-f467ee4f-f180-4173-a745-c02a31527645 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 698.119672] env[68569]: DEBUG nova.compute.manager [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Received event network-vif-plugged-1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 698.119865] env[68569]: DEBUG oslo_concurrency.lockutils [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] Acquiring lock "912303de-a79d-41b0-ab44-c79e850a4dee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.120078] env[68569]: DEBUG oslo_concurrency.lockutils [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] Lock "912303de-a79d-41b0-ab44-c79e850a4dee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.120249] env[68569]: DEBUG oslo_concurrency.lockutils [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] Lock "912303de-a79d-41b0-ab44-c79e850a4dee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.120440] env[68569]: DEBUG nova.compute.manager [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] No waiting events found dispatching network-vif-plugged-1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 698.120612] env[68569]: WARNING nova.compute.manager [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Received unexpected event network-vif-plugged-1578930e-ed30-4f23-9d6a-55f02a439b25 for instance with vm_state building and task_state spawning. [ 698.120768] env[68569]: DEBUG nova.compute.manager [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Received event network-changed-1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 698.120917] env[68569]: DEBUG nova.compute.manager [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Refreshing instance network info cache due to event network-changed-1578930e-ed30-4f23-9d6a-55f02a439b25. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 698.121210] env[68569]: DEBUG oslo_concurrency.lockutils [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] Acquiring lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.121243] env[68569]: DEBUG oslo_concurrency.lockutils [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] Acquired lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.121393] env[68569]: DEBUG nova.network.neutron [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Refreshing network info cache for port 1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.131521] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2c2065db-956a-4e25-b755-368b3290e2fa tempest-ServerShowV254Test-927720790 tempest-ServerShowV254Test-927720790-project-member] Lock "9bb06e82-cc5c-4673-b1f6-aae87568aa9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.887s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.324541] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166711, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.489755] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.568329] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166713, 'name': Rename_Task, 'duration_secs': 0.226838} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.568612] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.568852] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c88a1c9a-de1f-468b-8769-b43252206803 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.579117] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 698.579117] env[68569]: value = "task-3166714" [ 698.579117] env[68569]: _type = "Task" [ 698.579117] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.588340] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166714, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.662233] env[68569]: DEBUG nova.network.neutron [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.731524] env[68569]: DEBUG nova.network.neutron [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.829160] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166711, 'name': CloneVM_Task, 'duration_secs': 1.591736} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.832455] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Created linked-clone VM from snapshot [ 698.834029] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b280a48-9dd6-46e9-b272-f01a0b5c8b34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.842294] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Uploading image 95267f9c-e3ac-4b36-822a-0660497fa9f9 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 698.863462] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 698.863462] env[68569]: value = "vm-633528" [ 698.863462] env[68569]: _type = "VirtualMachine" [ 698.863462] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 698.863751] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ce9fcfae-a344-4491-b365-1f11ffcd0247 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.875967] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lease: (returnval){ [ 698.875967] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d232f0-615c-e87d-c667-704324b7e8ce" [ 698.875967] env[68569]: _type = "HttpNfcLease" [ 698.875967] env[68569]: } obtained for exporting VM: (result){ [ 698.875967] env[68569]: value = "vm-633528" [ 698.875967] env[68569]: _type = "VirtualMachine" [ 698.875967] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 698.876259] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the lease: (returnval){ [ 698.876259] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d232f0-615c-e87d-c667-704324b7e8ce" [ 698.876259] env[68569]: _type = "HttpNfcLease" [ 698.876259] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 698.883506] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 698.883506] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d232f0-615c-e87d-c667-704324b7e8ce" [ 698.883506] env[68569]: _type = "HttpNfcLease" [ 698.883506] env[68569]: } is initializing. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 699.090991] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166714, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.113327] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8211bc42-ce12-45b7-8e58-bb0b1e1ec053 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.121760] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1481f813-c819-4170-9eb5-3d9c4d266939 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.156029] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8fe527-37a4-49ba-8cd3-5cacc45dabd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.164872] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fac6332-bd21-442e-9b7e-80c36710e592 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.179646] env[68569]: DEBUG nova.compute.provider_tree [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.234241] env[68569]: DEBUG oslo_concurrency.lockutils [req-b457909d-46f0-4d1a-809f-e32bd4f6ffa5 req-1f96b27c-72ae-4de6-8844-ff4827604775 service nova] Releasing lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.234480] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.235493] env[68569]: DEBUG nova.network.neutron [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.385028] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 699.385028] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d232f0-615c-e87d-c667-704324b7e8ce" [ 699.385028] env[68569]: _type = "HttpNfcLease" [ 699.385028] env[68569]: } is ready. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 699.385028] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 699.385028] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d232f0-615c-e87d-c667-704324b7e8ce" [ 699.385028] env[68569]: _type = "HttpNfcLease" [ 699.385028] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 699.385686] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabafea4-8f5f-4877-880b-ea01c927df19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.394428] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52976b34-1f4f-fe77-2b51-b87bf524a07b/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 699.394626] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52976b34-1f4f-fe77-2b51-b87bf524a07b/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 699.505032] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-67295060-db8c-483c-a63e-4978f4e184eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.590104] env[68569]: DEBUG oslo_vmware.api [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166714, 'name': PowerOnVM_Task, 'duration_secs': 0.964076} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.590488] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.590711] env[68569]: INFO nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Took 8.42 seconds to spawn the instance on the hypervisor. 
[ 699.590892] env[68569]: DEBUG nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.591672] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de614e0-7c5e-41e7-884a-22a5b8500321 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.683019] env[68569]: DEBUG nova.scheduler.client.report [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.767719] env[68569]: DEBUG nova.network.neutron [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.951389] env[68569]: DEBUG nova.network.neutron [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Updating instance_info_cache with network_info: [{"id": "1578930e-ed30-4f23-9d6a-55f02a439b25", "address": "fa:16:3e:b1:40:1d", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1578930e-ed", "ovs_interfaceid": "1578930e-ed30-4f23-9d6a-55f02a439b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.113319] env[68569]: INFO nova.compute.manager [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Took 51.74 seconds to build instance. 
[ 700.190885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.191690] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 700.195278] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.244s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.197585] env[68569]: INFO nova.compute.claims [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.430767] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528feea4-1932-f4e4-a199-5f68188a341f/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 700.431917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3c944e-f5f3-4605-8ea3-f84e8241850c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.439563] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528feea4-1932-f4e4-a199-5f68188a341f/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 700.439756] env[68569]: ERROR oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528feea4-1932-f4e4-a199-5f68188a341f/disk-0.vmdk due to incomplete transfer. 
[ 700.439998] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f736b9a2-1a27-4f6a-832c-3f29ac51326a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.448520] env[68569]: DEBUG oslo_vmware.rw_handles [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528feea4-1932-f4e4-a199-5f68188a341f/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 700.448729] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Uploaded image 8aaa5fed-efbe-4437-b896-3e81de63e0fd to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 700.451026] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 700.451232] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0bf01496-0d4d-42b0-9910-915b0733fc28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.454467] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.454752] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Instance network_info: |[{"id": "1578930e-ed30-4f23-9d6a-55f02a439b25", "address": "fa:16:3e:b1:40:1d", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1578930e-ed", "ovs_interfaceid": "1578930e-ed30-4f23-9d6a-55f02a439b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 700.455451] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:40:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1578930e-ed30-4f23-9d6a-55f02a439b25', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.463215] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.464953] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.465411] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 700.465411] env[68569]: value = "task-3166716" [ 700.465411] env[68569]: _type = "Task" [ 700.465411] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.465684] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdd8f9e8-5bba-4672-938a-a670f981a27a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.491837] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166716, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.493328] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.493328] env[68569]: value = "task-3166717" [ 700.493328] env[68569]: _type = "Task" [ 700.493328] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.501922] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166717, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.615075] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09d7e36b-476f-4a93-8d4d-79ae3d777df3 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.494s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.704896] env[68569]: DEBUG nova.compute.utils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 700.707091] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 700.707428] env[68569]: DEBUG nova.network.neutron [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 700.724905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.725412] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.726212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.726212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.726212] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.728709] env[68569]: INFO nova.compute.manager [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Terminating instance [ 700.747296] env[68569]: DEBUG nova.policy [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f23ccc9a0c054d6db6bd439e15b1e5ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d69fb55927344ad96aa0bf26f8f230e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 700.812098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "78f486aa-80f4-4d43-bd00-cc6206517a72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.812098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.812098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "78f486aa-80f4-4d43-bd00-cc6206517a72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.812098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.812432] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock 
"78f486aa-80f4-4d43-bd00-cc6206517a72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.814294] env[68569]: INFO nova.compute.manager [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Terminating instance [ 700.991849] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166716, 'name': Destroy_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.002326] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166717, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.056013] env[68569]: DEBUG nova.network.neutron [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Successfully created port: 6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.118439] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.208144] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 701.232694] env[68569]: DEBUG nova.compute.manager [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 701.232937] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 701.233952] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d596bd0e-fbcd-4e17-bc0e-ee46fe0e2136 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.242451] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 701.242551] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8f3aad3-db19-409c-a80f-2be1deee3bbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.251899] env[68569]: DEBUG oslo_vmware.api [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 701.251899] env[68569]: value = "task-3166718" [ 701.251899] env[68569]: _type = "Task" [ 701.251899] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.261032] env[68569]: DEBUG oslo_vmware.api [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166718, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.317685] env[68569]: DEBUG nova.compute.manager [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 701.320583] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 701.320583] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180f020b-1cfe-4e0f-af9d-18ae4e9e3525 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.333721] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 701.333721] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-665e33b8-8156-428a-b12b-d9a42945cf2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.347197] env[68569]: DEBUG oslo_vmware.api [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 701.347197] env[68569]: value = "task-3166719" [ 701.347197] env[68569]: _type = "Task" [ 701.347197] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.356252] env[68569]: DEBUG oslo_vmware.api [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166719, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.492553] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166716, 'name': Destroy_Task, 'duration_secs': 0.657022} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.492862] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Destroyed the VM [ 701.492925] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 701.493187] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b36253d8-d4de-45cf-bf9b-17e2bda88fbe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.502549] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 701.502549] env[68569]: value = "task-3166720" [ 701.502549] env[68569]: _type = "Task" [ 701.502549] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.506497] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166717, 'name': CreateVM_Task, 'duration_secs': 0.513553} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.510818] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 701.510818] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.510818] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.511076] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 701.511707] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9833baf4-92ff-4951-a5f5-75ab342e7ff4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.519042] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f 
tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166720, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.522697] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 701.522697] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527cae60-3c29-5216-9944-752cf3f09b81" [ 701.522697] env[68569]: _type = "Task" [ 701.522697] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.530937] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527cae60-3c29-5216-9944-752cf3f09b81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.636131] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.763917] env[68569]: DEBUG oslo_vmware.api [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166718, 'name': PowerOffVM_Task, 'duration_secs': 0.215327} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.764265] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 701.764438] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 701.764953] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d90e39c0-b3d4-4be2-880e-71df25f408d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.787011] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53508e69-4e70-4809-a9be-a97c06c79460 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.796319] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73333f2-63e5-4f78-be38-cae726927f32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.843410] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05267fbd-0818-49d7-adf8-8efb6567c0ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.847797] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 701.848060] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 701.848407] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleting the datastore file [datastore2] ec64b2fd-2409-4af1-8f51-cc0ccbba14f2 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 701.848895] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-027b8ed0-210f-4e92-88ae-cdd9e67f73a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.864869] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7bea89-31c4-4d8a-bb0f-74fcbbb7160d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 701.877437] env[68569]: DEBUG oslo_vmware.api [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166719, 'name': PowerOffVM_Task, 'duration_secs': 0.277076} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.877785] env[68569]: DEBUG oslo_vmware.api [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 701.877785] env[68569]: value = "task-3166722" [ 701.877785] env[68569]: _type = "Task" [ 701.877785] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.878511] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 701.878712] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 701.879093] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9c3ee10-6995-4469-9a1f-087fd71b208d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.891458] env[68569]: DEBUG nova.compute.provider_tree [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.897801] env[68569]: DEBUG oslo_vmware.api [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166722, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.954297] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 701.954379] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 701.955147] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleting the datastore file [datastore1] 78f486aa-80f4-4d43-bd00-cc6206517a72 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 701.955147] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cfe9570-ccac-4c6e-8cce-acedd1fdb339 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.960981] env[68569]: DEBUG oslo_vmware.api [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 701.960981] env[68569]: value = "task-3166724" [ 701.960981] env[68569]: _type = "Task" [ 701.960981] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.969371] env[68569]: DEBUG oslo_vmware.api [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.016636] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166720, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.032907] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527cae60-3c29-5216-9944-752cf3f09b81, 'name': SearchDatastore_Task, 'duration_secs': 0.012583} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.033632] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.033632] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 702.033905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.034060] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.034240] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.034511] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2da51866-04c7-48b5-8bc0-dfa451e3feee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.054759] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.054967] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 702.055739] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a96d5d9-959f-471b-91b6-f6dd12994098 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.061529] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 702.061529] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525122c2-577d-6801-eebc-c88afe1367c4" [ 702.061529] env[68569]: _type = "Task" [ 702.061529] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.070221] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525122c2-577d-6801-eebc-c88afe1367c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.220684] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 702.245398] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 702.245398] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.245669] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 702.245727] env[68569]: DEBUG nova.virt.hardware [None 
req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.246082] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.246082] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 702.246545] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 702.246545] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 702.246827] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 702.247276] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 702.247276] env[68569]: DEBUG nova.virt.hardware [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 702.248676] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d62dd44-51c4-49f9-b29a-3c9e34e27ef6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.258773] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ba3134-5bfa-48b9-92e0-6d7d1c8f4a36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.389084] env[68569]: DEBUG oslo_vmware.api [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] 
Task: {'id': task-3166722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191886} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.389084] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 702.389084] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 702.389084] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 702.389401] env[68569]: INFO nova.compute.manager [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 702.389401] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 702.389585] env[68569]: DEBUG nova.compute.manager [-] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 702.389678] env[68569]: DEBUG nova.network.neutron [-] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.395257] env[68569]: DEBUG nova.scheduler.client.report [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.471787] env[68569]: DEBUG oslo_vmware.api [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3166724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242782} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.472101] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 702.472293] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 702.472537] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 702.472710] env[68569]: INFO nova.compute.manager [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Took 1.15 seconds to destroy the instance on the hypervisor. [ 702.472959] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 702.473271] env[68569]: DEBUG nova.compute.manager [-] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 702.473471] env[68569]: DEBUG nova.network.neutron [-] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.517857] env[68569]: DEBUG oslo_vmware.api [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166720, 'name': RemoveSnapshot_Task, 'duration_secs': 0.782864} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.518167] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 702.518407] env[68569]: INFO nova.compute.manager [None req-0df55d2c-f308-455d-8f80-7e551e36e16f tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Took 17.60 seconds to snapshot the instance on the hypervisor. 
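
The SearchDatastore_Task, DeleteDatastoreFile_Task and RemoveSnapshot_Task records above all follow the same oslo.vmware pattern: a *_Task SOAP method is invoked through the API session, vCenter returns a task reference immediately, and wait_for_task() in oslo_vmware/api.py polls that task until it reaches a terminal state, which is what produces the "progress is N%" and "completed successfully" lines. A minimal stand-alone sketch of that pattern follows; the vCenter host, credentials and datastore lookup are placeholders, and the constructor arguments are an approximation of the oslo.vmware API rather than values or code taken from this deployment.

# Sketch only -- not Nova code.  Drives one SearchDatastore_Task to completion
# through oslo.vmware, the library whose wait_for_task/_poll_task calls are
# logged throughout this section.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.org',               # vCenter host (placeholder)
    'administrator@vsphere.local',  # user name (placeholder)
    'secret',                       # password (placeholder)
    10,                             # api_retry_count
    0.5)                            # task_poll_interval, seconds between polls

# Look up one datastore and read its 'browser' property (a HostDatastoreBrowser
# managed object reference); get_objects() is a thin PropertyCollector helper.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'Datastore', 1, ['browser'])
ds_browser = result.objects[0].propSet[0].val

# *_Task methods return a task reference right away ...
task_ref = session.invoke_api(session.vim, 'SearchDatastore_Task',
                              ds_browser,
                              datastorePath='[datastore2] devstack-image-cache_base')

# ... and wait_for_task() polls TaskInfo until success or error, emitting the
# same kind of progress records that appear in this log.
task_info = session.wait_for_task(task_ref)
print(task_info.state)

Nova's vmwareapi driver reaches these same calls through its own wrappers (nova/virt/vmwareapi/vm_util.py, ds_util.py and vmops.py), which is where the module paths in the surrounding records point.
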
[ 702.572831] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525122c2-577d-6801-eebc-c88afe1367c4, 'name': SearchDatastore_Task, 'duration_secs': 0.012744} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.573645] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-432c4388-530c-4105-bc1b-d059cf581620 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.579340] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 702.579340] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5245dc40-485c-e156-3716-6e87ee26cf7a" [ 702.579340] env[68569]: _type = "Task" [ 702.579340] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.589522] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5245dc40-485c-e156-3716-6e87ee26cf7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.832718] env[68569]: DEBUG nova.compute.manager [req-ec07ecc1-c0da-4046-8929-0794af85a966 req-1ce343cf-0ebb-4120-9429-eae570a95441 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Received event network-vif-plugged-6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 702.832944] env[68569]: DEBUG oslo_concurrency.lockutils [req-ec07ecc1-c0da-4046-8929-0794af85a966 req-1ce343cf-0ebb-4120-9429-eae570a95441 service nova] Acquiring lock "6606b921-4f3a-44f5-ae4e-c600f26876fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.833202] env[68569]: DEBUG oslo_concurrency.lockutils [req-ec07ecc1-c0da-4046-8929-0794af85a966 req-1ce343cf-0ebb-4120-9429-eae570a95441 service nova] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.833324] env[68569]: DEBUG oslo_concurrency.lockutils [req-ec07ecc1-c0da-4046-8929-0794af85a966 req-1ce343cf-0ebb-4120-9429-eae570a95441 service nova] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.833489] env[68569]: DEBUG nova.compute.manager [req-ec07ecc1-c0da-4046-8929-0794af85a966 req-1ce343cf-0ebb-4120-9429-eae570a95441 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] No waiting events found dispatching 
network-vif-plugged-6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 702.833652] env[68569]: WARNING nova.compute.manager [req-ec07ecc1-c0da-4046-8929-0794af85a966 req-1ce343cf-0ebb-4120-9429-eae570a95441 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Received unexpected event network-vif-plugged-6bb42ed5-e951-4b70-af61-7b4cb927ce5f for instance with vm_state building and task_state spawning. [ 702.903593] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.903593] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 702.903811] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.260s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.905218] env[68569]: INFO nova.compute.claims [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.089947] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5245dc40-485c-e156-3716-6e87ee26cf7a, 'name': SearchDatastore_Task, 'duration_secs': 0.025442} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.090283] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.090493] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 912303de-a79d-41b0-ab44-c79e850a4dee/912303de-a79d-41b0-ab44-c79e850a4dee.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 703.090844] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8ec7ba7-dc1f-468c-85ec-9647716913c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.099173] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 703.099173] env[68569]: value = "task-3166725" [ 703.099173] env[68569]: _type = "Task" [ 703.099173] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.107441] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.122910] env[68569]: DEBUG nova.network.neutron [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Successfully updated port: 6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.411931] env[68569]: DEBUG nova.compute.utils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 703.413554] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 703.413762] env[68569]: DEBUG nova.network.neutron [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.483504] env[68569]: DEBUG nova.policy [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efa968d5c9404a8ba6b41581ba1dc130', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2390bba854fb4cb18dcb02ec8fd1c302', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.519493] env[68569]: DEBUG nova.network.neutron [-] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.553086] env[68569]: DEBUG nova.network.neutron [-] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.611089] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166725, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.626821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.630097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.630097] env[68569]: DEBUG nova.network.neutron [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.856146] env[68569]: DEBUG nova.network.neutron [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Successfully created port: 848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.917817] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 704.025459] env[68569]: INFO nova.compute.manager [-] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Took 1.64 seconds to deallocate network for instance. [ 704.057792] env[68569]: INFO nova.compute.manager [-] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Took 1.58 seconds to deallocate network for instance. [ 704.116828] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577166} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.117490] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 912303de-a79d-41b0-ab44-c79e850a4dee/912303de-a79d-41b0-ab44-c79e850a4dee.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 704.117737] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 704.118328] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35c312ca-4ac1-4206-a247-430734d35bd8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.126114] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 704.126114] env[68569]: value = "task-3166726" [ 704.126114] env[68569]: _type = "Task" [ 704.126114] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.142026] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166726, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.180834] env[68569]: DEBUG nova.network.neutron [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.375700] env[68569]: DEBUG nova.network.neutron [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updating instance_info_cache with network_info: [{"id": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "address": "fa:16:3e:a2:3b:71", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bb42ed5-e9", "ovs_interfaceid": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.528753] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014af60d-f347-4bba-a785-72c56a47fe5b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.532513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.538612] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0db162-3a73-4d4a-9640-ce8b94983ac2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.569116] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.570162] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f9514b-0f23-44d4-9802-1e86312987a5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.578375] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75acda94-e1a4-4274-a518-af6de3b1edf3 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.593989] env[68569]: DEBUG nova.compute.provider_tree [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.636181] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074127} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.636864] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 704.637614] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40970b6-d696-4788-b26f-c19ab57abae7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.659518] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 912303de-a79d-41b0-ab44-c79e850a4dee/912303de-a79d-41b0-ab44-c79e850a4dee.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 704.659822] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0aa9edf-9a4c-475c-bb98-cfaf61e3c3e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.680393] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 704.680393] env[68569]: value = "task-3166727" [ 704.680393] env[68569]: _type = "Task" [ 704.680393] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.689828] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166727, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.868908] env[68569]: DEBUG nova.compute.manager [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Received event network-vif-deleted-925021fa-a59a-4390-b3c2-3290381e5664 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 704.869255] env[68569]: DEBUG nova.compute.manager [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Received event network-vif-deleted-15a27b78-8a00-40e9-af61-368e8645b1ed {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 704.869502] env[68569]: DEBUG nova.compute.manager [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Received event network-changed-6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 704.869651] env[68569]: DEBUG nova.compute.manager [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Refreshing instance network info cache due to event network-changed-6bb42ed5-e951-4b70-af61-7b4cb927ce5f. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 704.869872] env[68569]: DEBUG oslo_concurrency.lockutils [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] Acquiring lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.880326] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Releasing lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.880739] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Instance network_info: |[{"id": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "address": "fa:16:3e:a2:3b:71", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bb42ed5-e9", 
"ovs_interfaceid": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 704.881277] env[68569]: DEBUG oslo_concurrency.lockutils [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] Acquired lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.881467] env[68569]: DEBUG nova.network.neutron [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Refreshing network info cache for port 6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.883226] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:3b:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db68bd64-5b56-49af-a075-13dcf85cb2e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bb42ed5-e951-4b70-af61-7b4cb927ce5f', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.891358] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.893064] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 704.893306] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a04e4c6a-4b48-489b-ae3e-8ce200e219ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.914319] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 704.914319] env[68569]: value = "task-3166728" [ 704.914319] env[68569]: _type = "Task" [ 704.914319] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.924119] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166728, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.926485] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.949635] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.949903] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.950090] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.950291] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.950548] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.950741] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.950960] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.951156] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 704.951334] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.951513] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.951702] env[68569]: DEBUG nova.virt.hardware [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.952602] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4092edd6-bbbb-4afb-9a54-fcaec65ec85c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.961713] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65004db9-454a-4c1a-8d16-0aaec5ee67fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.097709] env[68569]: DEBUG nova.scheduler.client.report [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.190646] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166727, 'name': ReconfigVM_Task, 'duration_secs': 0.457792} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.191795] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 912303de-a79d-41b0-ab44-c79e850a4dee/912303de-a79d-41b0-ab44-c79e850a4dee.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.191795] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e93f2ec-09a9-4b31-bf1a-7ec66b42572b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.198326] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 705.198326] env[68569]: value = "task-3166729" [ 705.198326] env[68569]: _type = "Task" [ 705.198326] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.207838] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166729, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.339231] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "c9264123-ab19-40d5-959a-791b8966d2f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.339575] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c9264123-ab19-40d5-959a-791b8966d2f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.339790] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "c9264123-ab19-40d5-959a-791b8966d2f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.339972] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c9264123-ab19-40d5-959a-791b8966d2f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.340265] 
env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c9264123-ab19-40d5-959a-791b8966d2f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.342404] env[68569]: INFO nova.compute.manager [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Terminating instance [ 705.427914] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166728, 'name': CreateVM_Task, 'duration_secs': 0.431932} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.428222] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 705.428932] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.429284] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.429509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 705.429772] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87e1395c-257b-4bc7-9634-068c2c7937b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.437026] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 705.437026] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f3142f-3d90-22fc-5e2f-39a029dcb63f" [ 705.437026] env[68569]: _type = "Task" [ 705.437026] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.447974] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f3142f-3d90-22fc-5e2f-39a029dcb63f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.604060] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.604060] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 705.607006] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.395s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.607255] env[68569]: DEBUG nova.objects.instance [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'resources' on Instance uuid 303f41c0-7a19-48b2-a072-4f138f6f8156 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.627997] env[68569]: DEBUG nova.network.neutron [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Successfully updated port: 848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.708897] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166729, 'name': Rename_Task, 'duration_secs': 0.195237} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.709219] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 705.709448] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7711c052-052c-47d4-b658-e6e786383afa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.715721] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 705.715721] env[68569]: value = "task-3166730" [ 705.715721] env[68569]: _type = "Task" [ 705.715721] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.720572] env[68569]: DEBUG nova.network.neutron [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updated VIF entry in instance network info cache for port 6bb42ed5-e951-4b70-af61-7b4cb927ce5f. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 705.720899] env[68569]: DEBUG nova.network.neutron [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updating instance_info_cache with network_info: [{"id": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "address": "fa:16:3e:a2:3b:71", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bb42ed5-e9", "ovs_interfaceid": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.725810] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166730, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.845814] env[68569]: DEBUG nova.compute.manager [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 705.846074] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.846962] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5871a4ab-18ce-42de-9e2a-018001422d53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.855017] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 705.855225] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27958a7b-4a47-4b42-8a22-1ea5d0cc6939 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.862181] env[68569]: DEBUG oslo_vmware.api [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 705.862181] env[68569]: value = "task-3166731" [ 705.862181] env[68569]: _type = "Task" [ 705.862181] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.870242] env[68569]: DEBUG oslo_vmware.api [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166731, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.948864] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f3142f-3d90-22fc-5e2f-39a029dcb63f, 'name': SearchDatastore_Task, 'duration_secs': 0.011413} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.949149] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.949435] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.949680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.949829] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.950015] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.950300] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-772ec2cc-b716-40c8-8ae4-9c4cef7bf935 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.959745] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.959745] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.960470] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b113f07b-795a-4bb6-a66f-a0d80e66ef3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.965903] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 705.965903] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5213393f-91b3-a846-4f07-c747498ebd33" [ 705.965903] env[68569]: _type = "Task" [ 705.965903] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.974139] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5213393f-91b3-a846-4f07-c747498ebd33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.110035] env[68569]: DEBUG nova.compute.utils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.111486] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 706.111658] env[68569]: DEBUG nova.network.neutron [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 706.131220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "refresh_cache-2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.131376] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquired lock "refresh_cache-2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.131604] env[68569]: DEBUG nova.network.neutron [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.164759] env[68569]: DEBUG nova.policy [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07f230a9b5d34fa088d03ebaeac27644', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c45df010dfc40089844060df429bb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.227828] env[68569]: DEBUG oslo_concurrency.lockutils [req-71699f89-a172-4e80-9f6e-c318f480e38f req-9969057b-0813-45de-842c-eab4fc71a027 service nova] Releasing lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.227999] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166730, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.374731] env[68569]: DEBUG oslo_vmware.api [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166731, 'name': PowerOffVM_Task, 'duration_secs': 0.206319} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.375031] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 706.375200] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 706.375447] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c000c776-407a-4e5d-90c7-ec15fe718c70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.430470] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 706.430470] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 706.430470] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Deleting the datastore file [datastore2] c9264123-ab19-40d5-959a-791b8966d2f6 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 706.433145] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f18f3702-1d8f-4f2d-8814-08fd23916457 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.440053] env[68569]: DEBUG oslo_vmware.api [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 706.440053] env[68569]: value = "task-3166733" [ 706.440053] env[68569]: _type = "Task" [ 706.440053] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.449970] env[68569]: DEBUG oslo_vmware.api [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.475593] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5213393f-91b3-a846-4f07-c747498ebd33, 'name': SearchDatastore_Task, 'duration_secs': 0.010908} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.479548] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4285946-b814-4410-89ca-1f0fd8e2fbcd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.484371] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 706.484371] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f666e2-a975-fec7-db5f-722d55cfffd1" [ 706.484371] env[68569]: _type = "Task" [ 706.484371] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.494596] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f666e2-a975-fec7-db5f-722d55cfffd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.548174] env[68569]: DEBUG nova.network.neutron [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Successfully created port: 502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.617193] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 706.683815] env[68569]: DEBUG nova.network.neutron [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.693340] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f94385-20cd-4152-96ba-22c492341a8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.701023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b1d020-d713-49aa-9edc-46c80fd686e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.737355] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80349078-fb2c-4abb-ab31-897b031187fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.746740] env[68569]: DEBUG oslo_vmware.api [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166730, 'name': PowerOnVM_Task, 'duration_secs': 0.628154} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.748836] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 706.749069] env[68569]: INFO nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Took 9.50 seconds to spawn the instance on the hypervisor. 
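Annotation (not part of the log): the repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully ... duration_secs" entries above are produced by oslo.vmware's task polling. The sketch below is a minimal, stdlib-only illustration of that polling loop; `get_task_info` is a hypothetical callable standing in for the real vCenter property query, so this shows the pattern visible in the log, not the library's actual implementation.

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info() is a hypothetical callable that returns an object with
    .state ("running" / "success" / "error"), .progress and .error fields,
    mirroring the TaskInfo shape implied by the log entries above.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"completed successfully (duration_secs={duration:.6f})")
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # Corresponds to the "progress is N%" lines emitted while polling.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```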
[ 706.749254] env[68569]: DEBUG nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 706.750065] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07f424f-ce31-40e8-b3b1-b80cbefef14a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.753383] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375ff12f-9177-4c24-b63a-e28daed6b015 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.771936] env[68569]: DEBUG nova.compute.provider_tree [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.936133] env[68569]: DEBUG nova.compute.manager [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Received event network-vif-plugged-848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 706.936470] env[68569]: DEBUG oslo_concurrency.lockutils [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] Acquiring lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.936747] env[68569]: DEBUG oslo_concurrency.lockutils [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.936967] env[68569]: DEBUG oslo_concurrency.lockutils [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.937206] env[68569]: DEBUG nova.compute.manager [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] No waiting events found dispatching network-vif-plugged-848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 706.937425] env[68569]: WARNING nova.compute.manager [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Received unexpected event network-vif-plugged-848c4111-2c4b-472d-b66f-a7609615ff2d for instance with vm_state building and task_state spawning. 
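Annotation (not part of the log): the "Received event network-vif-plugged-…", "Acquiring lock \"…-events\"" and "No waiting events found dispatching … Received unexpected event …" entries above reflect a registry of expected per-instance events that gets popped when Neutron reports them; if nothing was registered, the event is logged as unexpected. Below is a simplified, hypothetical version of that bookkeeping using threading primitives; it is an illustration of the pattern, not Nova's code.

```python
import threading
from collections import defaultdict

class InstanceEvents:
    """Simplified per-instance event registry (illustrative only)."""

    def __init__(self):
        self._lock = threading.Lock()
        # {instance_uuid: {event_name: threading.Event}}
        self._events = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register an event we expect later (e.g. before plugging a VIF)."""
        ev = threading.Event()
        with self._lock:
            self._events[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Dispatch an incoming event; returns None if nobody was waiting."""
        with self._lock:
            ev = self._events.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"Received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return None
        ev.set()
        return ev

# Usage sketch: a builder calls prepare(...) before plugging the VIF and then
# waits on the returned Event; the event handler calls pop(...) on arrival.
```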
[ 706.937632] env[68569]: DEBUG nova.compute.manager [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Received event network-changed-848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 706.937831] env[68569]: DEBUG nova.compute.manager [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Refreshing instance network info cache due to event network-changed-848c4111-2c4b-472d-b66f-a7609615ff2d. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 706.938055] env[68569]: DEBUG oslo_concurrency.lockutils [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] Acquiring lock "refresh_cache-2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.950111] env[68569]: DEBUG oslo_vmware.api [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18312} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.950350] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.954021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 706.954021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.954021] env[68569]: INFO nova.compute.manager [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 706.954021] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.954021] env[68569]: DEBUG nova.compute.manager [-] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 706.954021] env[68569]: DEBUG nova.network.neutron [-] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.961023] env[68569]: DEBUG nova.network.neutron [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Updating instance_info_cache with network_info: [{"id": "848c4111-2c4b-472d-b66f-a7609615ff2d", "address": "fa:16:3e:38:34:40", "network": {"id": "eabc1d4c-cdba-4940-9efe-c67cd4b52ccb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-16487918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2390bba854fb4cb18dcb02ec8fd1c302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap848c4111-2c", "ovs_interfaceid": "848c4111-2c4b-472d-b66f-a7609615ff2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.997219] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f666e2-a975-fec7-db5f-722d55cfffd1, 'name': SearchDatastore_Task, 'duration_secs': 0.019302} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.998032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.998032] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 6606b921-4f3a-44f5-ae4e-c600f26876fc/6606b921-4f3a-44f5-ae4e-c600f26876fc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.998183] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5c4d184-38d9-4c02-91c1-0f10b4312537 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.005111] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 707.005111] env[68569]: value = "task-3166734" [ 707.005111] env[68569]: _type = "Task" [ 707.005111] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.012991] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.281578] env[68569]: DEBUG nova.scheduler.client.report [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.294015] env[68569]: INFO nova.compute.manager [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Took 43.08 seconds to build instance. 
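Annotation (not part of the log): the "Inventory has not changed for provider a29ec0c8-…" entry above carries the placement inventory for this compute node. Usable capacity per resource class is commonly derived as (total - reserved) * allocation_ratio, subject to min_unit/max_unit/step_size. A small worked sketch using exactly the numbers from that log entry:

```python
# Inventory copied from the log entry for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def usable_capacity(inv):
    """(total - reserved) * allocation_ratio for each resource class."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```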
[ 707.461438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Releasing lock "refresh_cache-2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.461678] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Instance network_info: |[{"id": "848c4111-2c4b-472d-b66f-a7609615ff2d", "address": "fa:16:3e:38:34:40", "network": {"id": "eabc1d4c-cdba-4940-9efe-c67cd4b52ccb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-16487918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2390bba854fb4cb18dcb02ec8fd1c302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap848c4111-2c", "ovs_interfaceid": "848c4111-2c4b-472d-b66f-a7609615ff2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 707.462055] env[68569]: DEBUG oslo_concurrency.lockutils [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] Acquired lock "refresh_cache-2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.462257] env[68569]: DEBUG nova.network.neutron [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Refreshing network info cache for port 848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.463992] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:34:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '848c4111-2c4b-472d-b66f-a7609615ff2d', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.473048] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Creating folder: Project 
(2390bba854fb4cb18dcb02ec8fd1c302). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.477303] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01a9fd90-18f1-4227-a575-df0b52bb23b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.489681] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Created folder: Project (2390bba854fb4cb18dcb02ec8fd1c302) in parent group-v633430. [ 707.489907] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Creating folder: Instances. Parent ref: group-v633531. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.490186] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-feba1872-7281-4eb9-a9b7-6b7fc3b62de3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.506564] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Created folder: Instances in parent group-v633531. [ 707.506827] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 707.510715] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 707.511069] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b06de8c-cf58-4f97-80d9-7708c1df64b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.534291] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166734, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.535501] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.535501] env[68569]: value = "task-3166737" [ 707.535501] env[68569]: _type = "Task" [ 707.535501] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.543708] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166737, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.627528] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 707.652503] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 707.652741] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.652873] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 707.653060] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.653206] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 707.653363] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 707.653620] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 707.653775] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 707.653936] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 707.654103] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 707.654274] env[68569]: DEBUG nova.virt.hardware [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 707.655131] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a1e003-be6f-44b0-be38-c1971d88e267 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.663574] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ec139e-f404-4b98-a4c4-f965162cc5cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.718593] env[68569]: DEBUG nova.network.neutron [-] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.723467] env[68569]: DEBUG nova.network.neutron [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Updated VIF entry in instance network info cache for port 848c4111-2c4b-472d-b66f-a7609615ff2d. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.723847] env[68569]: DEBUG nova.network.neutron [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Updating instance_info_cache with network_info: [{"id": "848c4111-2c4b-472d-b66f-a7609615ff2d", "address": "fa:16:3e:38:34:40", "network": {"id": "eabc1d4c-cdba-4940-9efe-c67cd4b52ccb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-16487918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2390bba854fb4cb18dcb02ec8fd1c302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap848c4111-2c", "ovs_interfaceid": "848c4111-2c4b-472d-b66f-a7609615ff2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.796078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.798488] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f00409a-6212-467b-843b-beb79e7975e7 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "912303de-a79d-41b0-ab44-c79e850a4dee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.602s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.799010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.747s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.799255] env[68569]: DEBUG nova.objects.instance [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lazy-loading 'resources' on Instance uuid f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 707.823262] env[68569]: INFO nova.scheduler.client.report [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted 
allocations for instance 303f41c0-7a19-48b2-a072-4f138f6f8156 [ 708.015747] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534357} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.016123] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 6606b921-4f3a-44f5-ae4e-c600f26876fc/6606b921-4f3a-44f5-ae4e-c600f26876fc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 708.016430] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 708.016758] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2713bd6c-f7a3-4325-a932-79e39f8a2aee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.023811] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 708.023811] env[68569]: value = "task-3166738" [ 708.023811] env[68569]: _type = "Task" [ 708.023811] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.033564] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166738, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.045099] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166737, 'name': CreateVM_Task, 'duration_secs': 0.329011} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.045808] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 708.046970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.046970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.046970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 708.046970] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dc2e3e7-8a21-4c3e-bdeb-1946ea84fd7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.053378] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 708.053378] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521811fd-d6ef-948d-d46c-d31b660dad75" [ 708.053378] env[68569]: _type = "Task" [ 708.053378] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.062132] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521811fd-d6ef-948d-d46c-d31b660dad75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.222512] env[68569]: INFO nova.compute.manager [-] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Took 1.27 seconds to deallocate network for instance. 
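Annotation (not part of the log): the lock and semaphore entries above ("Acquiring lock \"[datastore1] devstack-image-cache_base/cfcf6154-…\" … Acquired … Releasing") serialize concurrent builds that want the same cached base image while the driver checks whether the base VMDK is already present and fetches or copies it if not. A minimal sketch of that guard using oslo.concurrency is below; `image_cached` and `fetch_image` are hypothetical helpers, and the lock-name convention simply mirrors what the log shows.

```python
from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id, image_cached, fetch_image):
    """Serialize builds that need the same cached base image (sketch only).

    image_cached(path) -> bool and fetch_image(path) are hypothetical
    callables; the lock name follows the "[datastore] cache-dir/image-id"
    convention visible in the log entries above.
    """
    cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(cache_path):
        if not image_cached(cache_path):
            fetch_image(cache_path)
    return cache_path
```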
[ 708.227980] env[68569]: DEBUG oslo_concurrency.lockutils [req-dc520d2c-9008-4406-93e1-7a3f03993a9b req-6a419457-8942-487b-a258-30fe12791406 service nova] Releasing lock "refresh_cache-2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.240529] env[68569]: DEBUG nova.network.neutron [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Successfully updated port: 502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 708.304960] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 708.337220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-00208e4a-8ebd-41bb-a3bc-2977081d1e97 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "303f41c0-7a19-48b2-a072-4f138f6f8156" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.642s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.342130] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52976b34-1f4f-fe77-2b51-b87bf524a07b/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 708.343276] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3250322-ac1e-4869-94e9-72f2489e63b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.351769] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52976b34-1f4f-fe77-2b51-b87bf524a07b/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 708.352463] env[68569]: ERROR oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52976b34-1f4f-fe77-2b51-b87bf524a07b/disk-0.vmdk due to incomplete transfer. 
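Annotation (not part of the log): the hardware entries a little earlier in this log ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") show topology selection collapsing to a single 1x1x1 layout when neither flavor nor image constrains the guest CPU layout. The sketch below is a simplified, illustrative enumeration of (sockets, cores, threads) triples for a given vCPU count under maximum limits; it is not Nova's implementation.

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the single topology in the log
print(possible_topologies(4))  # [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...]
```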
[ 708.353964] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fa2a5e63-5bf2-4c9b-8e0d-e8f656a4faf8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.362366] env[68569]: DEBUG oslo_vmware.rw_handles [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52976b34-1f4f-fe77-2b51-b87bf524a07b/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 708.362659] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Uploaded image 95267f9c-e3ac-4b36-822a-0660497fa9f9 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 708.364257] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 708.366768] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-05981bc7-8918-4d18-a647-6cc793cd2f93 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.373157] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 708.373157] env[68569]: value = "task-3166739" [ 708.373157] env[68569]: _type = "Task" [ 708.373157] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.390600] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166739, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.534391] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166738, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068448} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.534589] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 708.536941] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fc7d0f-705e-430d-96ad-8c842902cbc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.561783] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 6606b921-4f3a-44f5-ae4e-c600f26876fc/6606b921-4f3a-44f5-ae4e-c600f26876fc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 708.564741] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07e7fc9c-5e11-4784-9941-478aa3179f2f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.588417] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521811fd-d6ef-948d-d46c-d31b660dad75, 'name': SearchDatastore_Task, 'duration_secs': 0.009263} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.589816] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.590061] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.590301] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.590475] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.591182] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.591182] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 708.591182] env[68569]: value = "task-3166740" [ 708.591182] env[68569]: _type = "Task" [ 708.591182] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.593512] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6abfc1c-70a8-4dbf-8ecf-36ce62398af9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.604066] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166740, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.607474] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.607754] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.608615] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd5a75e-ba58-43dd-8cb0-9354e4ee133e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.616311] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 708.616311] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5230f135-a6a1-d431-502b-fdc98e74ecae" [ 708.616311] env[68569]: _type = "Task" [ 708.616311] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.626150] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5230f135-a6a1-d431-502b-fdc98e74ecae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.730091] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.743430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.743625] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.743794] env[68569]: DEBUG nova.network.neutron [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 708.822703] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.885999] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166739, 'name': Destroy_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.890743] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35088ee8-e84a-4f6d-8716-c4dcb07ee195 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.897485] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0d6c80-edf2-4951-9573-452ab648124a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.928968] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae29081-93b5-4541-bb8a-4280905e85e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.936490] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c35f562-3580-4363-b577-eac45634d03d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.949859] env[68569]: DEBUG nova.compute.provider_tree [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.032941] env[68569]: DEBUG nova.compute.manager [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Received event network-vif-deleted-9fcabc27-ebd3-48dd-bf0d-1829ee0f304f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 709.032941] env[68569]: DEBUG nova.compute.manager [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Received event network-vif-plugged-502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 709.032941] env[68569]: DEBUG oslo_concurrency.lockutils [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] Acquiring lock "de2b0206-0c73-4275-89ff-37199520dd71-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.032941] env[68569]: DEBUG oslo_concurrency.lockutils [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] Lock "de2b0206-0c73-4275-89ff-37199520dd71-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.032941] env[68569]: DEBUG oslo_concurrency.lockutils [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] Lock "de2b0206-0c73-4275-89ff-37199520dd71-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.033208] env[68569]: DEBUG 
nova.compute.manager [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] No waiting events found dispatching network-vif-plugged-502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.033456] env[68569]: WARNING nova.compute.manager [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Received unexpected event network-vif-plugged-502ae245-07dd-43f0-a1dc-c733e5f5cd3a for instance with vm_state building and task_state spawning. [ 709.033665] env[68569]: DEBUG nova.compute.manager [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Received event network-changed-502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 709.033847] env[68569]: DEBUG nova.compute.manager [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Refreshing instance network info cache due to event network-changed-502ae245-07dd-43f0-a1dc-c733e5f5cd3a. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 709.034044] env[68569]: DEBUG oslo_concurrency.lockutils [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] Acquiring lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.050608] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "492c0fa1-f821-496a-86c2-f7686479a733" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.050992] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.104421] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166740, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.125344] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5230f135-a6a1-d431-502b-fdc98e74ecae, 'name': SearchDatastore_Task, 'duration_secs': 0.024759} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.126118] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc2fc0e-de56-448e-83f0-03fbcdf0cfb0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.130804] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 709.130804] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52997b44-a8d6-cfc7-f6ea-de7945534798" [ 709.130804] env[68569]: _type = "Task" [ 709.130804] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.137909] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52997b44-a8d6-cfc7-f6ea-de7945534798, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.282780] env[68569]: DEBUG nova.network.neutron [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.383204] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166739, 'name': Destroy_Task, 'duration_secs': 0.932407} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.383473] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Destroyed the VM [ 709.383704] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 709.383942] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9630cd68-67aa-44ee-99fb-e549d1c96c14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.390487] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 709.390487] env[68569]: value = "task-3166741" [ 709.390487] env[68569]: _type = "Task" [ 709.390487] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.400996] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166741, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.438897] env[68569]: DEBUG nova.network.neutron [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Updating instance_info_cache with network_info: [{"id": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "address": "fa:16:3e:39:3c:0e", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap502ae245-07", "ovs_interfaceid": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.452830] env[68569]: DEBUG nova.scheduler.client.report [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 709.499643] env[68569]: DEBUG nova.compute.manager [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 709.608416] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166740, 'name': ReconfigVM_Task, 'duration_secs': 0.710961} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.608754] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 6606b921-4f3a-44f5-ae4e-c600f26876fc/6606b921-4f3a-44f5-ae4e-c600f26876fc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.609424] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33f68a9b-d842-494c-ae39-8fbb39473f3e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.616686] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 709.616686] env[68569]: value = "task-3166742" [ 709.616686] env[68569]: _type = "Task" [ 709.616686] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.625641] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166742, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.641185] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52997b44-a8d6-cfc7-f6ea-de7945534798, 'name': SearchDatastore_Task, 'duration_secs': 0.009588} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.641459] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.641720] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c/2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.641998] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d00ff3c-fec6-44d6-bd86-88547bfe284d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.647952] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 709.647952] env[68569]: value = "task-3166743" [ 709.647952] env[68569]: _type = "Task" [ 709.647952] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.656350] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.904137] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166741, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.941908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.942282] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Instance network_info: |[{"id": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "address": "fa:16:3e:39:3c:0e", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap502ae245-07", "ovs_interfaceid": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 709.942588] env[68569]: DEBUG oslo_concurrency.lockutils [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] Acquired lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.942772] env[68569]: DEBUG nova.network.neutron [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Refreshing network info cache for port 502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.944069] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:3c:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '502ae245-07dd-43f0-a1dc-c733e5f5cd3a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 709.951905] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 
tempest-ListServerFiltersTestJSON-456276387-project-member] Creating folder: Project (5c45df010dfc40089844060df429bb31). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.953087] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a321343-538c-495e-8e44-75982dbbf52d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.957399] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.959719] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.284s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.959934] env[68569]: DEBUG nova.objects.instance [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lazy-loading 'resources' on Instance uuid 239c1217-ba8e-474a-b02c-7d85e3ac92f4 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 709.963590] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Created folder: Project (5c45df010dfc40089844060df429bb31) in parent group-v633430. [ 709.963712] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Creating folder: Instances. Parent ref: group-v633534. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.963997] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5a9f6ce-fa1a-4899-a32f-f238033b3659 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.974235] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Created folder: Instances in parent group-v633534. [ 709.974523] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 709.974725] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 709.974944] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-205f14a5-65cc-458b-9ec0-eed44c83ada7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.990664] env[68569]: INFO nova.scheduler.client.report [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Deleted allocations for instance f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc [ 709.998649] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.998649] env[68569]: value = "task-3166746" [ 709.998649] env[68569]: _type = "Task" [ 709.998649] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.012078] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166746, 'name': CreateVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.017846] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.128138] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166742, 'name': Rename_Task, 'duration_secs': 0.185898} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.128657] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.129053] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8c55a9d-984f-4dde-a277-c2cb48c84ce6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.135325] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 710.135325] env[68569]: value = "task-3166747" [ 710.135325] env[68569]: _type = "Task" [ 710.135325] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.143672] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166747, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.156828] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46168} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.158023] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c/2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 710.158023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.158023] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5635ee1-63e0-4339-a5d9-d98924ec413e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.163877] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 710.163877] env[68569]: value = "task-3166748" [ 710.163877] env[68569]: _type = "Task" [ 710.163877] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.172442] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.402495] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166741, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.500434] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d6e1b90-d976-40e7-a4cf-9565530b3472 tempest-InstanceActionsNegativeTestJSON-1906875428 tempest-InstanceActionsNegativeTestJSON-1906875428-project-member] Lock "f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.910s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.511130] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166746, 'name': CreateVM_Task, 'duration_secs': 0.333206} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.514411] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.515010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.515180] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.515491] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.516304] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7111ac4a-63b7-4783-bbe6-d14a41f01d5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.521718] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 710.521718] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cd393a-a3b2-e95c-3088-bcb39addc338" [ 710.521718] env[68569]: _type = "Task" [ 710.521718] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.534818] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cd393a-a3b2-e95c-3088-bcb39addc338, 'name': SearchDatastore_Task, 'duration_secs': 0.010167} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.535105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.535333] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.535596] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.535701] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.535879] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.536149] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8f0a2c7-9f65-4fbd-9516-45f09c9d4174 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.548571] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.548763] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.549559] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3858fb5e-753c-44d6-9d16-7f3ac381ef1b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.558831] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 710.558831] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bf984c-4a5f-5fab-61a8-0765e98cb72a" [ 710.558831] env[68569]: _type = "Task" [ 710.558831] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.570212] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bf984c-4a5f-5fab-61a8-0765e98cb72a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.645396] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166747, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.673974] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06459} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.676573] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.677686] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09537407-d7e9-4ce7-8a9f-a20259e7cc34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.700548] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c/2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.701588] env[68569]: DEBUG nova.network.neutron [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Updated VIF entry in instance network info cache for port 502ae245-07dd-43f0-a1dc-c733e5f5cd3a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 710.701910] env[68569]: DEBUG nova.network.neutron [req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Updating instance_info_cache with network_info: [{"id": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "address": "fa:16:3e:39:3c:0e", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap502ae245-07", "ovs_interfaceid": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.706224] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-408de79f-1651-4609-8b10-dac5ad92567c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.721842] env[68569]: DEBUG oslo_concurrency.lockutils 
[req-1458ebcc-91d4-41fe-b31e-8c8b1e3cf771 req-1eadc61d-2821-4494-a8a7-872615091b19 service nova] Releasing lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.727839] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 710.727839] env[68569]: value = "task-3166749" [ 710.727839] env[68569]: _type = "Task" [ 710.727839] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.738922] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166749, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.905489] env[68569]: DEBUG oslo_vmware.api [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166741, 'name': RemoveSnapshot_Task, 'duration_secs': 1.185441} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.905817] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 710.906103] env[68569]: INFO nova.compute.manager [None req-dcca6cbd-622c-477a-955c-323d210ebce6 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Took 16.74 seconds to snapshot the instance on the hypervisor. 
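Nearly every vCenter round trip in this section is funneled through the same wait_for_task pattern: the driver logs "Waiting for the task", then polls it, emitting "progress is N%" lines until the task reaches a terminal state (RemoveSnapshot_Task above goes 0% → 97% → done in 1.185s; PowerOnVM_Task below goes 0% → 66% → 88% → done in 1.261s). A simplified polling loop that reproduces that behaviour, written against a hypothetical `get_task_info` callable rather than oslo.vmware's real internals:

```python
import time

# Hedged sketch of the wait_for_task/_poll_task pattern seen in the log.
# `get_task_info(task_ref)` is a hypothetical callable assumed to return an
# object with .state ('queued', 'running', 'success', 'error'), .progress
# and .error -- roughly the shape of a vSphere TaskInfo. Not oslo.vmware code.
def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    print(f"Waiting for the task: {task_ref} to complete.")
    while True:
        info = get_task_info(task_ref)
        if info.state in ('queued', 'running'):
            print(f"Task: {task_ref} progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            print(f"Task: {task_ref} completed successfully.")
            return info
        raise RuntimeError(f"Task {task_ref} failed: {info.error}")
```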
[ 711.010776] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2317339-7094-4d53-beee-97ec025ab081 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.019202] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55761dc-a18b-4957-8ea9-ce877345a056 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.050209] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1436c4-2d0b-4936-9097-56eeaf4250ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.058192] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a183631-ce42-4bd3-937f-1c80aca5f2a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.070429] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bf984c-4a5f-5fab-61a8-0765e98cb72a, 'name': SearchDatastore_Task, 'duration_secs': 0.011211} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.079377] env[68569]: DEBUG nova.compute.provider_tree [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.080726] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eee4867-496d-4e85-90c7-19c37df79398 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.085842] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 711.085842] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed8eb0-2f0a-2400-791c-d915d3d722b6" [ 711.085842] env[68569]: _type = "Task" [ 711.085842] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.094438] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed8eb0-2f0a-2400-791c-d915d3d722b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.146026] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166747, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.240665] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166749, 'name': ReconfigVM_Task, 'duration_secs': 0.271527} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.241161] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c/2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.242089] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f088c06d-2fe5-42ce-b3e3-9c9c81539283 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.251641] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 711.251641] env[68569]: value = "task-3166750" [ 711.251641] env[68569]: _type = "Task" [ 711.251641] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.260536] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166750, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.584387] env[68569]: DEBUG nova.scheduler.client.report [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 711.601653] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed8eb0-2f0a-2400-791c-d915d3d722b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009246} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.601943] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.602251] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] de2b0206-0c73-4275-89ff-37199520dd71/de2b0206-0c73-4275-89ff-37199520dd71.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.602527] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2a22122-1da7-444b-8406-ab2c28845724 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.610053] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 711.610053] env[68569]: value = "task-3166751" [ 711.610053] env[68569]: _type = "Task" [ 711.610053] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.619284] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166751, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.645976] env[68569]: DEBUG oslo_vmware.api [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166747, 'name': PowerOnVM_Task, 'duration_secs': 1.261448} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.645976] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.646854] env[68569]: INFO nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Took 9.43 seconds to spawn the instance on the hypervisor. 
[ 711.646854] env[68569]: DEBUG nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.647632] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39db1318-feaa-4135-96e5-be0cafe7a8be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.762831] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166750, 'name': Rename_Task, 'duration_secs': 0.140606} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.763330] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.763603] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42143a31-fb6f-4d03-b1b7-7242a6849133 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.771143] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 711.771143] env[68569]: value = "task-3166752" [ 711.771143] env[68569]: _type = "Task" [ 711.771143] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.783119] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166752, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.096513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.099274] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.003s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.102081] env[68569]: INFO nova.compute.claims [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.121446] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500107} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.121718] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] de2b0206-0c73-4275-89ff-37199520dd71/de2b0206-0c73-4275-89ff-37199520dd71.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.121922] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.122624] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94553303-3fcb-4e12-91e3-af579d7fbec2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.125788] env[68569]: INFO nova.scheduler.client.report [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleted allocations for instance 239c1217-ba8e-474a-b02c-7d85e3ac92f4 [ 712.133586] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 712.133586] env[68569]: value = "task-3166753" [ 712.133586] env[68569]: _type = "Task" [ 712.133586] env[68569]: } to 
complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.148510] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166753, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.168712] env[68569]: INFO nova.compute.manager [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Took 44.33 seconds to build instance. [ 712.288923] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166752, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.645185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aea0da6d-025f-4b8b-a67b-1cf386f0a0d5 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "239c1217-ba8e-474a-b02c-7d85e3ac92f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.434s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.649495] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166753, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062719} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.649773] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.650536] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e847d182-16fe-4a90-8932-e7857da6283d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.677650] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] de2b0206-0c73-4275-89ff-37199520dd71/de2b0206-0c73-4275-89ff-37199520dd71.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.680029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd11d003-dffd-426c-b317-b1c23f06ea91 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.232s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.680029] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04b5135c-5b22-46ae-a796-f7ec0006cb77 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.704283] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 712.704283] env[68569]: value = "task-3166754" [ 712.704283] env[68569]: _type = "Task" [ 712.704283] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.716135] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166754, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.784465] env[68569]: DEBUG oslo_vmware.api [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166752, 'name': PowerOnVM_Task, 'duration_secs': 0.939709} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.784755] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 712.784982] env[68569]: INFO nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Took 7.86 seconds to spawn the instance on the hypervisor. [ 712.785189] env[68569]: DEBUG nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.786195] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42576ce-f07c-491e-825d-22ce4656d18d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.914577] env[68569]: DEBUG nova.compute.manager [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.914793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2233a665-51bb-4563-8ebf-833cdadfd9bc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.005247] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.005451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.197796] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.215069] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166754, 'name': ReconfigVM_Task, 'duration_secs': 0.297933} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.215757] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Reconfigured VM instance instance-00000023 to attach disk [datastore1] de2b0206-0c73-4275-89ff-37199520dd71/de2b0206-0c73-4275-89ff-37199520dd71.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.215958] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-195b2a8f-7054-4e5f-bfdd-5e4fac148c42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.224313] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 713.224313] env[68569]: value = "task-3166755" [ 713.224313] env[68569]: _type = "Task" [ 713.224313] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.236069] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166755, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.310770] env[68569]: INFO nova.compute.manager [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Took 44.38 seconds to build instance. 
[ 713.428059] env[68569]: INFO nova.compute.manager [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] instance snapshotting [ 713.433784] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef693b8-d8a0-48c0-a8ec-e4b0705e73ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.457994] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3689f67f-2317-4d10-8daa-15a856cb29f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.511435] env[68569]: DEBUG nova.compute.utils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 713.716310] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b299625-f64f-4312-a4d9-625637b841fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.719721] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.724959] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a3fe62-deda-467b-a400-56b82d03a1a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.761929] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e515f5b-e9f3-4c20-9474-7a802d5d32cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.764557] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166755, 'name': Rename_Task, 'duration_secs': 0.136216} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.764823] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 713.765463] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95d21d9c-2797-41c7-bab6-b68767e59d2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.770336] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5a9223-b0b8-4719-abf6-2cfc978d5f27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.775075] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 713.775075] env[68569]: value = "task-3166756" [ 713.775075] env[68569]: _type = "Task" [ 713.775075] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.786348] env[68569]: DEBUG nova.compute.provider_tree [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.790809] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166756, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.813365] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d26d42cb-b57f-4385-ba8d-e24db99e0bf0 tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.536s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.972357] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 713.972357] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-89f22267-724b-475b-a590-f22349554a7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.979487] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 713.979487] env[68569]: value = "task-3166757" [ 713.979487] env[68569]: _type = "Task" [ 713.979487] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.989185] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166757, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.016639] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.146251] env[68569]: DEBUG nova.compute.manager [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 714.146495] env[68569]: DEBUG nova.compute.manager [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing instance network info cache due to event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 714.147875] env[68569]: DEBUG oslo_concurrency.lockutils [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] Acquiring lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.147875] env[68569]: DEBUG oslo_concurrency.lockutils [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] Acquired lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.147875] env[68569]: DEBUG nova.network.neutron [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 714.285981] env[68569]: DEBUG oslo_vmware.api [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166756, 'name': PowerOnVM_Task, 'duration_secs': 0.482486} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.286335] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 714.286425] env[68569]: INFO nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Took 6.66 seconds to spawn the instance on the hypervisor. 
[ 714.286620] env[68569]: DEBUG nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 714.287416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcb843a-98f9-4c3d-8f2a-355b35c73ceb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.292374] env[68569]: DEBUG nova.scheduler.client.report [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.317058] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.489716] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166757, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.801372] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.801961] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 714.808081] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.006s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.812023] env[68569]: DEBUG nova.objects.instance [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lazy-loading 'resources' on Instance uuid a29854f9-0096-4b01-9350-bfddee84e2c2 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 714.815998] env[68569]: INFO nova.compute.manager [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Took 45.20 seconds to build instance. [ 714.841992] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.913337] env[68569]: DEBUG nova.network.neutron [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updated VIF entry in instance network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.913337] env[68569]: DEBUG nova.network.neutron [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.992328] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166757, 'name': CreateSnapshot_Task, 'duration_secs': 0.963908} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.992328] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 714.992328] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac044383-eacd-4fff-943c-de7637da5bd1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.088969] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.088969] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.088969] env[68569]: INFO nova.compute.manager [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Attaching volume a00e4c14-852f-4ac3-9275-c48328adff12 to /dev/sdb [ 715.100886] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.100886] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.138048] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5c1a8e-63e1-4e11-9f61-38f0ec6271b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.146474] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0c275a-7c4d-43e6-ba63-17b61cd1e868 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.160512] env[68569]: DEBUG nova.virt.block_device 
[None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updating existing volume attachment record: d1997044-90d2-403d-8d01-ec91f385ccc5 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 715.315054] env[68569]: DEBUG nova.compute.utils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 715.322998] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 715.322998] env[68569]: DEBUG nova.network.neutron [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 715.325515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aed1ca54-b5a7-43a5-b11b-633825176f14 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.116s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.381363] env[68569]: DEBUG nova.policy [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07f230a9b5d34fa088d03ebaeac27644', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c45df010dfc40089844060df429bb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 715.417399] env[68569]: DEBUG oslo_concurrency.lockutils [req-4a902579-ccd9-42a2-8118-1b7e59d4aa2b req-c3cfb41d-a316-4816-bc77-dc2a70c6da16 service nova] Releasing lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.515349] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 715.515922] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 
tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.516261] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.516554] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.516818] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.517108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.520611] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f9a4fb0d-ebd5-41b0-85ee-0d76b123358b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.528147] env[68569]: INFO nova.compute.manager [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Terminating instance [ 715.536748] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 715.536748] env[68569]: value = "task-3166761" [ 715.536748] env[68569]: _type = "Task" [ 715.536748] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.552832] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166761, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.702496] env[68569]: DEBUG nova.compute.manager [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 715.702688] env[68569]: DEBUG nova.compute.manager [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing instance network info cache due to event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 715.702898] env[68569]: DEBUG oslo_concurrency.lockutils [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] Acquiring lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.703048] env[68569]: DEBUG oslo_concurrency.lockutils [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] Acquired lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.703235] env[68569]: DEBUG nova.network.neutron [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.796863] env[68569]: DEBUG nova.network.neutron [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Successfully created port: 77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.823282] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 715.831499] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.942747] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cce6f9-d569-4539-8679-63efe77712c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.951889] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6e6b47-52f8-47e0-851e-7487d6e977b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.988842] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8f1679-eac6-4b33-b8a3-6ea53ddbad13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.997541] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c874eacd-fc84-4066-a273-0c18205fda74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.012626] env[68569]: DEBUG nova.compute.provider_tree [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.034050] env[68569]: DEBUG nova.compute.manager [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 716.034283] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.035137] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefb1fdd-396e-49c5-9ddd-e16699ca64b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.047281] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166761, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.049375] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 716.049619] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dcd1251-6a02-4a79-88dc-39b2d33166b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.055679] env[68569]: DEBUG oslo_vmware.api [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 716.055679] env[68569]: value = "task-3166762" [ 716.055679] env[68569]: _type = "Task" [ 716.055679] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.063749] env[68569]: DEBUG oslo_vmware.api [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.358296] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.497981] env[68569]: DEBUG nova.network.neutron [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updated VIF entry in instance network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.497981] env[68569]: DEBUG nova.network.neutron [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.517292] env[68569]: DEBUG nova.scheduler.client.report [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 716.549210] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166761, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.564988] env[68569]: DEBUG oslo_vmware.api [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166762, 'name': PowerOffVM_Task, 'duration_secs': 0.230602} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.565261] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.565450] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.565709] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75edd941-20e0-4bb6-bbaf-b57450c2d18b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.625806] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 716.626025] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 716.626323] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Deleting the datastore file [datastore1] 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.626628] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-577e91cc-1aed-4365-8fa1-c9d04feb94f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.633325] env[68569]: DEBUG oslo_vmware.api [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for the task: (returnval){ [ 716.633325] env[68569]: value = "task-3166764" [ 716.633325] env[68569]: _type = "Task" [ 716.633325] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.641469] env[68569]: DEBUG oslo_vmware.api [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166764, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.840101] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 716.868279] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 716.868549] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.868705] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 716.868886] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 716.869084] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 716.869255] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 716.869481] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 716.869671] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 716.869859] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 716.870466] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 716.870466] env[68569]: DEBUG nova.virt.hardware [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 716.871090] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11521b5c-adad-4083-a795-02148bfe4c1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.879738] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e94dd97-011e-4bd3-8b3c-8638eaa78937 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.000411] env[68569]: DEBUG oslo_concurrency.lockutils [req-136600e1-1512-48cb-ab18-3e6420b20172 req-3c6a7945-18a2-4a95-bc81-47894e2d5fb0 service nova] Releasing lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.020743] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.027692] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.749s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.027692] env[68569]: INFO nova.compute.claims [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.045668] env[68569]: INFO nova.scheduler.client.report 
[None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Deleted allocations for instance a29854f9-0096-4b01-9350-bfddee84e2c2 [ 717.054706] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166761, 'name': CloneVM_Task, 'duration_secs': 1.416415} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.058032] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Created linked-clone VM from snapshot [ 717.058731] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5f3109-7eba-41fa-9981-7fab952cc304 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.070127] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Uploading image 715d763c-7d6b-40b5-8da3-e87ce93cb79e {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 717.097380] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 717.097380] env[68569]: value = "vm-633540" [ 717.097380] env[68569]: _type = "VirtualMachine" [ 717.097380] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 717.097668] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-38556b61-9b73-4b8c-8ced-0d79500a3517 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.105808] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lease: (returnval){ [ 717.105808] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e07aa8-11f5-2558-d805-a97c227bfd93" [ 717.105808] env[68569]: _type = "HttpNfcLease" [ 717.105808] env[68569]: } obtained for exporting VM: (result){ [ 717.105808] env[68569]: value = "vm-633540" [ 717.105808] env[68569]: _type = "VirtualMachine" [ 717.105808] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 717.106122] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the lease: (returnval){ [ 717.106122] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e07aa8-11f5-2558-d805-a97c227bfd93" [ 717.106122] env[68569]: _type = "HttpNfcLease" [ 717.106122] env[68569]: } to be ready. 
{{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 717.113590] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 717.113590] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e07aa8-11f5-2558-d805-a97c227bfd93" [ 717.113590] env[68569]: _type = "HttpNfcLease" [ 717.113590] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 717.146429] env[68569]: DEBUG oslo_vmware.api [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Task: {'id': task-3166764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137996} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.147031] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.147031] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 717.147148] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.147283] env[68569]: INFO nova.compute.manager [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 717.151020] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.151020] env[68569]: DEBUG nova.compute.manager [-] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 717.151020] env[68569]: DEBUG nova.network.neutron [-] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 717.403730] env[68569]: DEBUG nova.compute.manager [req-d3a9fb86-4b06-402c-8349-04d67da9f7e0 req-d4ac52eb-5bda-48a5-9ad3-1b5dc3649088 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Received event network-vif-plugged-77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 717.403995] env[68569]: DEBUG oslo_concurrency.lockutils [req-d3a9fb86-4b06-402c-8349-04d67da9f7e0 req-d4ac52eb-5bda-48a5-9ad3-1b5dc3649088 service nova] Acquiring lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.404176] env[68569]: DEBUG oslo_concurrency.lockutils [req-d3a9fb86-4b06-402c-8349-04d67da9f7e0 req-d4ac52eb-5bda-48a5-9ad3-1b5dc3649088 service nova] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.404340] env[68569]: DEBUG oslo_concurrency.lockutils [req-d3a9fb86-4b06-402c-8349-04d67da9f7e0 req-d4ac52eb-5bda-48a5-9ad3-1b5dc3649088 service nova] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.404504] env[68569]: DEBUG nova.compute.manager [req-d3a9fb86-4b06-402c-8349-04d67da9f7e0 req-d4ac52eb-5bda-48a5-9ad3-1b5dc3649088 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] No waiting events found dispatching network-vif-plugged-77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 717.404737] env[68569]: WARNING nova.compute.manager [req-d3a9fb86-4b06-402c-8349-04d67da9f7e0 req-d4ac52eb-5bda-48a5-9ad3-1b5dc3649088 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Received unexpected event network-vif-plugged-77dcd96c-5db6-4974-8c32-59be867fda30 for instance with vm_state building and task_state spawning. 
[ 717.562499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04545124-6d2b-4538-92ca-8ae3af581e12 tempest-DeleteServersAdminTestJSON-268117276 tempest-DeleteServersAdminTestJSON-268117276-project-member] Lock "a29854f9-0096-4b01-9350-bfddee84e2c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.415s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.615986] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 717.615986] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e07aa8-11f5-2558-d805-a97c227bfd93" [ 717.615986] env[68569]: _type = "HttpNfcLease" [ 717.615986] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 717.616245] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 717.616245] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e07aa8-11f5-2558-d805-a97c227bfd93" [ 717.616245] env[68569]: _type = "HttpNfcLease" [ 717.616245] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 717.617341] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47d852a-c846-4125-802d-bc3bb0f31fa0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.624196] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52763392-36c1-9e2c-1f61-7a8c4b02ac24/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 717.624372] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52763392-36c1-9e2c-1f61-7a8c4b02ac24/disk-0.vmdk for reading. 
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 717.762347] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3209d190-fdbc-4e4b-9bc6-c38867cf24dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.765486] env[68569]: DEBUG nova.network.neutron [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Successfully updated port: 77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 717.802446] env[68569]: DEBUG nova.compute.manager [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Received event network-changed-6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 717.802665] env[68569]: DEBUG nova.compute.manager [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Refreshing instance network info cache due to event network-changed-6bb42ed5-e951-4b70-af61-7b4cb927ce5f. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 717.802860] env[68569]: DEBUG oslo_concurrency.lockutils [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] Acquiring lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.802995] env[68569]: DEBUG oslo_concurrency.lockutils [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] Acquired lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.803184] env[68569]: DEBUG nova.network.neutron [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Refreshing network info cache for port 6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.827160] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "6606b921-4f3a-44f5-ae4e-c600f26876fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.827427] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.827668] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "6606b921-4f3a-44f5-ae4e-c600f26876fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.827872] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.828062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.831317] env[68569]: INFO nova.compute.manager [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Terminating instance [ 718.244362] env[68569]: DEBUG nova.network.neutron [-] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.269581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "refresh_cache-eec09a1c-e8b2-4b6a-9545-e190e1f965d1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.269581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "refresh_cache-eec09a1c-e8b2-4b6a-9545-e190e1f965d1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.269766] env[68569]: DEBUG nova.network.neutron [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.334922] env[68569]: DEBUG nova.compute.manager [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 718.334922] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 718.339288] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbdec3d-b101-4afd-8dfe-95c3849b0240 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.348025] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 718.348473] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be5d98d4-2582-4664-95a2-1d6a5d9c462b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.360792] env[68569]: DEBUG oslo_vmware.api [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 718.360792] env[68569]: value = "task-3166767" [ 718.360792] env[68569]: _type = "Task" [ 718.360792] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.372242] env[68569]: DEBUG oslo_vmware.api [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.693193] env[68569]: DEBUG nova.network.neutron [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updated VIF entry in instance network info cache for port 6bb42ed5-e951-4b70-af61-7b4cb927ce5f. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 718.693658] env[68569]: DEBUG nova.network.neutron [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updating instance_info_cache with network_info: [{"id": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "address": "fa:16:3e:a2:3b:71", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bb42ed5-e9", "ovs_interfaceid": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.718381] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc6326c-dc1e-417d-b780-8c5a23d96aae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.727170] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad5a08b-4754-44bf-9155-49fd0ec25154 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.760549] env[68569]: INFO nova.compute.manager [-] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Took 1.61 seconds to deallocate network for instance. [ 718.763711] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be36557-c502-4c90-b8cd-2aa6c2ea0079 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.781275] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d3ad59-ea88-4ad4-9ffd-f75642408201 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.799239] env[68569]: DEBUG nova.compute.provider_tree [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.818946] env[68569]: DEBUG nova.network.neutron [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.871222] env[68569]: DEBUG oslo_vmware.api [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166767, 'name': PowerOffVM_Task, 'duration_secs': 0.274398} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.871620] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 718.871828] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 718.872140] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8537f5c0-2722-402e-872d-ff34e82a2227 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.934550] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 718.935095] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 718.935354] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Deleting the datastore file [datastore2] 6606b921-4f3a-44f5-ae4e-c600f26876fc {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 718.935735] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9fefdd1-28e2-4d1f-88c7-6b71722ad76f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.944945] env[68569]: DEBUG oslo_vmware.api [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 718.944945] env[68569]: value = "task-3166769" [ 718.944945] env[68569]: _type = "Task" [ 718.944945] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.950859] env[68569]: DEBUG oslo_vmware.api [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166769, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.045272] env[68569]: DEBUG nova.network.neutron [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Updating instance_info_cache with network_info: [{"id": "77dcd96c-5db6-4974-8c32-59be867fda30", "address": "fa:16:3e:e0:87:af", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd96c-5d", "ovs_interfaceid": "77dcd96c-5db6-4974-8c32-59be867fda30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.196992] env[68569]: DEBUG oslo_concurrency.lockutils [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] Releasing lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.197372] env[68569]: DEBUG nova.compute.manager [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Received event network-changed-6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 719.197646] env[68569]: DEBUG nova.compute.manager [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Refreshing instance network info cache due to event network-changed-6bb42ed5-e951-4b70-af61-7b4cb927ce5f. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 719.197950] env[68569]: DEBUG oslo_concurrency.lockutils [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] Acquiring lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.198190] env[68569]: DEBUG oslo_concurrency.lockutils [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] Acquired lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.198430] env[68569]: DEBUG nova.network.neutron [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Refreshing network info cache for port 6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.279703] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.302732] env[68569]: DEBUG nova.scheduler.client.report [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.442619] env[68569]: DEBUG nova.compute.manager [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Received event network-vif-deleted-848c4111-2c4b-472d-b66f-a7609615ff2d {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 719.442830] env[68569]: DEBUG nova.compute.manager [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Received event network-changed-77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 719.442991] env[68569]: DEBUG nova.compute.manager [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Refreshing instance network info cache due to event network-changed-77dcd96c-5db6-4974-8c32-59be867fda30. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 719.444131] env[68569]: DEBUG oslo_concurrency.lockutils [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] Acquiring lock "refresh_cache-eec09a1c-e8b2-4b6a-9545-e190e1f965d1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.456403] env[68569]: DEBUG oslo_vmware.api [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148924} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.456646] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 719.456832] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 719.457016] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.457187] env[68569]: INFO nova.compute.manager [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 719.457434] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 719.458031] env[68569]: DEBUG nova.compute.manager [-] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 719.458132] env[68569]: DEBUG nova.network.neutron [-] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.549438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "refresh_cache-eec09a1c-e8b2-4b6a-9545-e190e1f965d1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.549823] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Instance network_info: |[{"id": "77dcd96c-5db6-4974-8c32-59be867fda30", "address": "fa:16:3e:e0:87:af", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd96c-5d", "ovs_interfaceid": "77dcd96c-5db6-4974-8c32-59be867fda30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 719.550465] env[68569]: DEBUG oslo_concurrency.lockutils [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] Acquired lock "refresh_cache-eec09a1c-e8b2-4b6a-9545-e190e1f965d1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.550465] env[68569]: DEBUG nova.network.neutron [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Refreshing network info cache for port 77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.558273] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:87:af', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77dcd96c-5db6-4974-8c32-59be867fda30', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.573623] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 719.574777] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 719.576358] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fa53dea-38c6-4da9-aea2-e87aea727508 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.599563] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.599563] env[68569]: value = "task-3166770" [ 719.599563] env[68569]: _type = "Task" [ 719.599563] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.608708] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166770, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.718313] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 719.718695] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633539', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'name': 'volume-a00e4c14-852f-4ac3-9275-c48328adff12', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fb97d2dd-d42a-42e8-9a36-5c913a58b891', 'attached_at': '', 'detached_at': '', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'serial': 'a00e4c14-852f-4ac3-9275-c48328adff12'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 719.719673] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c234f76-64a6-4e03-afd2-c3733b95a72b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.740499] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1ba1aa-4755-45c6-a176-3083adacdcc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.769817] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] volume-a00e4c14-852f-4ac3-9275-c48328adff12/volume-a00e4c14-852f-4ac3-9275-c48328adff12.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.773242] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c51412f5-02c7-4502-b87d-44ba89af47ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.794420] env[68569]: DEBUG oslo_vmware.api [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 719.794420] env[68569]: value = "task-3166771" [ 719.794420] env[68569]: _type = "Task" [ 719.794420] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.807533] env[68569]: DEBUG oslo_vmware.api [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166771, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.810498] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.811351] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 719.814267] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.427s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.816024] env[68569]: INFO nova.compute.claims [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.109551] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166770, 'name': CreateVM_Task, 'duration_secs': 0.3748} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.109819] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 720.110517] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.110645] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.110995] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 720.111265] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2cb259c-4a09-4068-bb99-d16a01262005 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.117302] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 720.117302] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527740d9-15d4-0ad9-f1b0-0f2f9baf7797" [ 720.117302] env[68569]: _type = "Task" [ 720.117302] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.125246] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527740d9-15d4-0ad9-f1b0-0f2f9baf7797, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.215464] env[68569]: DEBUG nova.network.neutron [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updated VIF entry in instance network info cache for port 6bb42ed5-e951-4b70-af61-7b4cb927ce5f. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 720.216638] env[68569]: DEBUG nova.network.neutron [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updating instance_info_cache with network_info: [{"id": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "address": "fa:16:3e:a2:3b:71", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bb42ed5-e9", "ovs_interfaceid": "6bb42ed5-e951-4b70-af61-7b4cb927ce5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.307497] env[68569]: DEBUG oslo_vmware.api [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166771, 'name': ReconfigVM_Task, 'duration_secs': 0.508803} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.307826] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Reconfigured VM instance instance-0000000c to attach disk [datastore2] volume-a00e4c14-852f-4ac3-9275-c48328adff12/volume-a00e4c14-852f-4ac3-9275-c48328adff12.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.315317] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59cb2993-d623-4002-8b3d-a6093a065ccd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.329995] env[68569]: DEBUG nova.compute.utils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 720.331681] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 720.332152] env[68569]: DEBUG nova.network.neutron [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 720.340334] env[68569]: DEBUG oslo_vmware.api [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 720.340334] env[68569]: value = "task-3166772" [ 720.340334] env[68569]: _type = "Task" [ 720.340334] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.355010] env[68569]: DEBUG oslo_vmware.api [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166772, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.418166] env[68569]: DEBUG nova.policy [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b181af5310c4a59894c3009e4d31691', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c95c0d8931944fd19a165f4a3af3d1fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 720.423512] env[68569]: DEBUG nova.network.neutron [-] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.484442] env[68569]: DEBUG nova.network.neutron [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Updated VIF entry in instance network info cache for port 77dcd96c-5db6-4974-8c32-59be867fda30. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 720.484894] env[68569]: DEBUG nova.network.neutron [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Updating instance_info_cache with network_info: [{"id": "77dcd96c-5db6-4974-8c32-59be867fda30", "address": "fa:16:3e:e0:87:af", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77dcd96c-5d", "ovs_interfaceid": "77dcd96c-5db6-4974-8c32-59be867fda30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.626748] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527740d9-15d4-0ad9-f1b0-0f2f9baf7797, 'name': SearchDatastore_Task, 'duration_secs': 0.00966} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.627073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.627314] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.627615] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.627734] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.627916] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.628194] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42136cb7-ede3-47ff-b415-cc7c938aa4fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.637263] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.637493] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 720.638239] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b264f76d-8057-4061-9be8-7c7867ec2eca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.647580] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 720.647580] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a87b72-5744-2e54-faa9-d48fdf1d9ad4" [ 720.647580] env[68569]: _type = "Task" [ 720.647580] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.665384] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a87b72-5744-2e54-faa9-d48fdf1d9ad4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.720601] env[68569]: DEBUG oslo_concurrency.lockutils [req-106c49f8-b48b-4f62-9976-62a86017b601 req-111fdd75-9c05-4c0b-909c-a8b4dbba3e03 service nova] Releasing lock "refresh_cache-6606b921-4f3a-44f5-ae4e-c600f26876fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.818404] env[68569]: DEBUG nova.network.neutron [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Successfully created port: 2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.836207] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 720.850317] env[68569]: DEBUG oslo_vmware.api [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166772, 'name': ReconfigVM_Task, 'duration_secs': 0.171012} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.850617] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633539', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'name': 'volume-a00e4c14-852f-4ac3-9275-c48328adff12', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fb97d2dd-d42a-42e8-9a36-5c913a58b891', 'attached_at': '', 'detached_at': '', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'serial': 'a00e4c14-852f-4ac3-9275-c48328adff12'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 720.928866] env[68569]: INFO nova.compute.manager [-] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Took 1.47 seconds to deallocate network for instance. [ 720.988482] env[68569]: DEBUG oslo_concurrency.lockutils [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] Releasing lock "refresh_cache-eec09a1c-e8b2-4b6a-9545-e190e1f965d1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.991156] env[68569]: DEBUG nova.compute.manager [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 720.991156] env[68569]: DEBUG nova.compute.manager [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing instance network info cache due to event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 720.991156] env[68569]: DEBUG oslo_concurrency.lockutils [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] Acquiring lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.991156] env[68569]: DEBUG oslo_concurrency.lockutils [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] Acquired lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.991156] env[68569]: DEBUG nova.network.neutron [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.160772] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a87b72-5744-2e54-faa9-d48fdf1d9ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.013777} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.164173] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15ac6dff-6ed0-411c-b554-c0b71330e582 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.169338] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 721.169338] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c90799-c581-3004-60ab-136d6f059a79" [ 721.169338] env[68569]: _type = "Task" [ 721.169338] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.180304] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c90799-c581-3004-60ab-136d6f059a79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.427108] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c3149e-0ede-42e8-a4cc-140407e2b118 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.434165] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1ccfe3-f8fe-427a-9f8e-2e8a07166c7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.439751] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.469760] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9f4e69-b227-47e7-962c-6a3506850298 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.478420] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee44fff6-9c74-4aba-a076-f7322b1af3b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.496124] env[68569]: DEBUG nova.compute.provider_tree [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.530677] env[68569]: DEBUG nova.compute.manager [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 721.530873] env[68569]: DEBUG nova.compute.manager [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing instance network info cache due to event network-changed-9b282c4e-170f-4f30-8c96-1a9b64168c47. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 721.531252] env[68569]: DEBUG oslo_concurrency.lockutils [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] Acquiring lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.681185] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c90799-c581-3004-60ab-136d6f059a79, 'name': SearchDatastore_Task, 'duration_secs': 0.0099} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.681466] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.681757] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] eec09a1c-e8b2-4b6a-9545-e190e1f965d1/eec09a1c-e8b2-4b6a-9545-e190e1f965d1.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 721.682155] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e4ac391-b66b-45b3-8d32-641493d1eaca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.689832] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 721.689832] env[68569]: value = "task-3166773" [ 721.689832] env[68569]: _type = "Task" [ 721.689832] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.700073] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166773, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.774896] env[68569]: DEBUG nova.network.neutron [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updated VIF entry in instance network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 721.774896] env[68569]: DEBUG nova.network.neutron [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.846858] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 721.878851] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 721.879163] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.879313] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 721.879707] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.879707] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 721.879870] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 721.880044] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 721.880217] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 721.880385] env[68569]: DEBUG nova.virt.hardware [None 
req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 721.880546] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 721.880721] env[68569]: DEBUG nova.virt.hardware [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 721.881723] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d22ff3-bfb1-42e5-a782-1d7a10985142 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.890759] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271f985b-8edd-4cc6-9b93-7809911e0b6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.905792] env[68569]: DEBUG nova.objects.instance [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lazy-loading 'flavor' on Instance uuid fb97d2dd-d42a-42e8-9a36-5c913a58b891 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.999519] env[68569]: DEBUG nova.scheduler.client.report [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.204570] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166773, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480418} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.204850] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] eec09a1c-e8b2-4b6a-9545-e190e1f965d1/eec09a1c-e8b2-4b6a-9545-e190e1f965d1.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 722.205091] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.205398] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ed717ef-ecca-485b-9c1e-18e8b52a0379 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.214249] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 722.214249] env[68569]: value = "task-3166774" [ 722.214249] env[68569]: _type = "Task" [ 722.214249] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.223964] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166774, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.277077] env[68569]: DEBUG oslo_concurrency.lockutils [req-097351e6-a7aa-4f17-b272-0a9389d1eda0 req-5e4d0514-6097-420b-a42d-28ea747aae37 service nova] Releasing lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.277077] env[68569]: DEBUG oslo_concurrency.lockutils [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] Acquired lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.277077] env[68569]: DEBUG nova.network.neutron [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Refreshing network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 722.416582] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c2e243be-ccff-4c86-abfe-f72ff2ab2b09 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.328s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.467406] env[68569]: DEBUG nova.network.neutron [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Successfully updated port: 2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 722.506046] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.506046] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 722.509247] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.925s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.510424] env[68569]: INFO nova.compute.claims [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.724172] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166774, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072433} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.724553] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.725376] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cbf582-1921-429f-b093-80a3abdd72e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.747472] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] eec09a1c-e8b2-4b6a-9545-e190e1f965d1/eec09a1c-e8b2-4b6a-9545-e190e1f965d1.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.747758] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4dfaa3b-f329-412b-9c88-adf1bbdffae9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.769312] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 722.769312] env[68569]: value = "task-3166775" [ 722.769312] env[68569]: _type = "Task" [ 722.769312] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.777855] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166775, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.941668] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.941982] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.970134] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "refresh_cache-77b9756e-2299-47e2-a6d8-e8026e33a3de" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.970293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquired lock "refresh_cache-77b9756e-2299-47e2-a6d8-e8026e33a3de" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.970445] env[68569]: DEBUG nova.network.neutron [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 723.014850] env[68569]: DEBUG nova.compute.utils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 723.018453] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 723.018763] env[68569]: DEBUG nova.network.neutron [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 723.050490] env[68569]: DEBUG nova.network.neutron [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updated VIF entry in instance network info cache for port 9b282c4e-170f-4f30-8c96-1a9b64168c47. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.050862] env[68569]: DEBUG nova.network.neutron [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [{"id": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "address": "fa:16:3e:e5:c7:1e", "network": {"id": "a2e78ea6-bf69-4312-abcf-eb606f96f132", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1264634806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d69fb55927344ad96aa0bf26f8f230e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b282c4e-17", "ovs_interfaceid": "9b282c4e-170f-4f30-8c96-1a9b64168c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.071178] env[68569]: DEBUG nova.policy [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07f230a9b5d34fa088d03ebaeac27644', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c45df010dfc40089844060df429bb31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 723.109268] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "50abc994-682a-40d6-ae77-601839b98793" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.109509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "50abc994-682a-40d6-ae77-601839b98793" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.281865] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166775, 'name': ReconfigVM_Task, 
'duration_secs': 0.367046} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.282235] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Reconfigured VM instance instance-00000024 to attach disk [datastore1] eec09a1c-e8b2-4b6a-9545-e190e1f965d1/eec09a1c-e8b2-4b6a-9545-e190e1f965d1.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.283286] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57973850-ffbf-48d6-aaa3-4178212156fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.292368] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 723.292368] env[68569]: value = "task-3166776" [ 723.292368] env[68569]: _type = "Task" [ 723.292368] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.307183] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166776, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.403516] env[68569]: DEBUG nova.network.neutron [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Successfully created port: 44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.447597] env[68569]: INFO nova.compute.manager [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Detaching volume a00e4c14-852f-4ac3-9275-c48328adff12 [ 723.483569] env[68569]: INFO nova.virt.block_device [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Attempting to driver detach volume a00e4c14-852f-4ac3-9275-c48328adff12 from mountpoint /dev/sdb [ 723.483897] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 723.484126] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633539', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'name': 'volume-a00e4c14-852f-4ac3-9275-c48328adff12', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fb97d2dd-d42a-42e8-9a36-5c913a58b891', 'attached_at': '', 'detached_at': '', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'serial': 'a00e4c14-852f-4ac3-9275-c48328adff12'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 723.485016] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7813ae-6ce4-41f6-927a-7f37c3b28df3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.512259] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005e229f-fbb2-40d4-88a3-33a768d9b707 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.515471] env[68569]: DEBUG nova.network.neutron [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.519372] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 723.534932] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299e24da-e226-4c67-970e-17019eac85c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.560380] env[68569]: DEBUG oslo_concurrency.lockutils [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] Releasing lock "refresh_cache-39a84212-2e52-4dba-b00c-5689564deaf4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.560555] env[68569]: DEBUG nova.compute.manager [req-e6dc2bac-8daa-4745-a0be-b1eddfe9f4c2 req-36af5529-91dc-4dbe-be1f-710d5add637d service nova] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Received event network-vif-deleted-6bb42ed5-e951-4b70-af61-7b4cb927ce5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 723.564062] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9114dcf4-b271-4c07-9f89-86c1921be188 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.572951] env[68569]: DEBUG nova.compute.manager [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Received event network-vif-plugged-2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 723.573196] env[68569]: DEBUG oslo_concurrency.lockutils [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] Acquiring lock "77b9756e-2299-47e2-a6d8-e8026e33a3de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.573404] env[68569]: DEBUG oslo_concurrency.lockutils [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.573563] env[68569]: DEBUG oslo_concurrency.lockutils [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.573778] env[68569]: DEBUG nova.compute.manager [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] No waiting events found dispatching network-vif-plugged-2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 723.575508] env[68569]: WARNING nova.compute.manager [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Received unexpected event network-vif-plugged-2cb53b84-1f50-4d92-95dd-850b9abb24be for instance with 
vm_state building and task_state spawning. [ 723.575508] env[68569]: DEBUG nova.compute.manager [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Received event network-changed-2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 723.575508] env[68569]: DEBUG nova.compute.manager [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Refreshing instance network info cache due to event network-changed-2cb53b84-1f50-4d92-95dd-850b9abb24be. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 723.575508] env[68569]: DEBUG oslo_concurrency.lockutils [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] Acquiring lock "refresh_cache-77b9756e-2299-47e2-a6d8-e8026e33a3de" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.588983] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] The volume has not been displaced from its original location: [datastore2] volume-a00e4c14-852f-4ac3-9275-c48328adff12/volume-a00e4c14-852f-4ac3-9275-c48328adff12.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 723.596359] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 723.599055] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54e6a47f-03e4-409d-a10b-3da7ec7ba8e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.622451] env[68569]: DEBUG oslo_vmware.api [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 723.622451] env[68569]: value = "task-3166777" [ 723.622451] env[68569]: _type = "Task" [ 723.622451] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.634351] env[68569]: DEBUG oslo_vmware.api [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166777, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.803175] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166776, 'name': Rename_Task, 'duration_secs': 0.179504} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.803856] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.803856] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35d54d2f-97bf-40db-89e7-ea3510fb58fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.810252] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 723.810252] env[68569]: value = "task-3166778" [ 723.810252] env[68569]: _type = "Task" [ 723.810252] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.818900] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166778, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.826360] env[68569]: DEBUG nova.network.neutron [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Updating instance_info_cache with network_info: [{"id": "2cb53b84-1f50-4d92-95dd-850b9abb24be", "address": "fa:16:3e:f7:71:58", "network": {"id": "de8437f1-32d9-4cb8-ae5d-70cf69b42adc", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-945478097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c95c0d8931944fd19a165f4a3af3d1fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb53b84-1f", "ovs_interfaceid": "2cb53b84-1f50-4d92-95dd-850b9abb24be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.136959] env[68569]: DEBUG oslo_vmware.api [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166777, 'name': ReconfigVM_Task, 'duration_secs': 0.279699} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.139265] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 724.155955] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e5fd383-e7dd-4f51-bc03-00595a7d5705 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.173061] env[68569]: DEBUG oslo_vmware.api [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 724.173061] env[68569]: value = "task-3166779" [ 724.173061] env[68569]: _type = "Task" [ 724.173061] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.181738] env[68569]: DEBUG oslo_vmware.api [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166779, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.213423] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56418163-568a-41f9-a8ee-816a1703cfaa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.223040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429d018b-fb38-41e7-a22b-2bd4e7f4601d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.256793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520ba7db-8b44-49e8-bcb7-f35e033141b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.265996] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3726977-5dc3-494f-88c6-5978f0a4ceb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.283251] env[68569]: DEBUG nova.compute.provider_tree [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.320463] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166778, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.330031] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Releasing lock "refresh_cache-77b9756e-2299-47e2-a6d8-e8026e33a3de" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.330031] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Instance network_info: |[{"id": "2cb53b84-1f50-4d92-95dd-850b9abb24be", "address": "fa:16:3e:f7:71:58", "network": {"id": "de8437f1-32d9-4cb8-ae5d-70cf69b42adc", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-945478097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c95c0d8931944fd19a165f4a3af3d1fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb53b84-1f", "ovs_interfaceid": "2cb53b84-1f50-4d92-95dd-850b9abb24be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 724.330392] env[68569]: DEBUG oslo_concurrency.lockutils [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] Acquired lock "refresh_cache-77b9756e-2299-47e2-a6d8-e8026e33a3de" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.330392] env[68569]: DEBUG nova.network.neutron [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Refreshing network info cache for port 2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.331856] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:71:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3a0ddd7d-c321-4187-bdd8-b19044ea2c4a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cb53b84-1f50-4d92-95dd-850b9abb24be', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.338552] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 
tempest-ImagesNegativeTestJSON-1070738498-project-member] Creating folder: Project (c95c0d8931944fd19a165f4a3af3d1fb). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.339367] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1548d73b-1c65-4b6e-8883-862fda5ded5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.350650] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Created folder: Project (c95c0d8931944fd19a165f4a3af3d1fb) in parent group-v633430. [ 724.350850] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Creating folder: Instances. Parent ref: group-v633542. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.351088] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-313afbce-ba89-4d4d-9138-28e802bd17b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.360541] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Created folder: Instances in parent group-v633542. [ 724.360793] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.360987] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.361545] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68d07b3f-db11-477b-b373-a12f63871dd4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.380854] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.380854] env[68569]: value = "task-3166782" [ 724.380854] env[68569]: _type = "Task" [ 724.380854] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.389894] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166782, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.539720] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 724.561845] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.562121] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.562278] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.562456] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.562598] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.562739] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.562944] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 724.563110] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 724.563273] 
env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.563427] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.563593] env[68569]: DEBUG nova.virt.hardware [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.564460] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea2ed49-e209-4f57-afe1-1982af2cca95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.572460] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59effeb9-5e87-4539-b7d4-a611fca8b1f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.682901] env[68569]: DEBUG oslo_vmware.api [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3166779, 'name': ReconfigVM_Task, 'duration_secs': 0.164854} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.683235] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633539', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'name': 'volume-a00e4c14-852f-4ac3-9275-c48328adff12', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fb97d2dd-d42a-42e8-9a36-5c913a58b891', 'attached_at': '', 'detached_at': '', 'volume_id': 'a00e4c14-852f-4ac3-9275-c48328adff12', 'serial': 'a00e4c14-852f-4ac3-9275-c48328adff12'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 724.784748] env[68569]: DEBUG nova.scheduler.client.report [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.821604] env[68569]: DEBUG oslo_vmware.api [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166778, 'name': PowerOnVM_Task, 'duration_secs': 0.680081} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.821977] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.822281] env[68569]: INFO nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Took 7.98 seconds to spawn the instance on the hypervisor. 
[ 724.822464] env[68569]: DEBUG nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.823561] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ed17d8-bf85-4311-83c8-f0a537f74ab6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.891205] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166782, 'name': CreateVM_Task, 'duration_secs': 0.475859} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.893534] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.894251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.894419] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.894776] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 724.895360] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d297352-8d88-4028-afd3-4535127313ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.900631] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 724.900631] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e47685-c3f6-377f-27b4-a417eddd0b4c" [ 724.900631] env[68569]: _type = "Task" [ 724.900631] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.910707] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e47685-c3f6-377f-27b4-a417eddd0b4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.108849] env[68569]: DEBUG nova.network.neutron [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Updated VIF entry in instance network info cache for port 2cb53b84-1f50-4d92-95dd-850b9abb24be. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.109253] env[68569]: DEBUG nova.network.neutron [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Updating instance_info_cache with network_info: [{"id": "2cb53b84-1f50-4d92-95dd-850b9abb24be", "address": "fa:16:3e:f7:71:58", "network": {"id": "de8437f1-32d9-4cb8-ae5d-70cf69b42adc", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-945478097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c95c0d8931944fd19a165f4a3af3d1fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb53b84-1f", "ovs_interfaceid": "2cb53b84-1f50-4d92-95dd-850b9abb24be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.267749] env[68569]: DEBUG nova.objects.instance [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lazy-loading 'flavor' on Instance uuid fb97d2dd-d42a-42e8-9a36-5c913a58b891 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.291215] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.782s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.291770] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 725.298863] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.857s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.298863] env[68569]: DEBUG nova.objects.instance [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lazy-loading 'resources' on Instance uuid 3ee3365b-0799-414b-b2a1-1d219bd9db96 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.340706] env[68569]: INFO nova.compute.manager [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Took 43.27 seconds to build instance. [ 725.400654] env[68569]: DEBUG nova.network.neutron [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Successfully updated port: 44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.413064] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e47685-c3f6-377f-27b4-a417eddd0b4c, 'name': SearchDatastore_Task, 'duration_secs': 0.015997} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.413366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.413585] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.413813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.413946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.414156] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.415168] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23641252-9323-45de-969c-7fd9f3e9b69c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.425328] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.425639] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.427191] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-784d4b5f-2eba-49f9-a26b-70fd555e182d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.437542] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 725.437542] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fb7434-98fb-1d54-e92d-67d668b589af" [ 725.437542] env[68569]: _type = "Task" [ 725.437542] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.449631] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fb7434-98fb-1d54-e92d-67d668b589af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.615215] env[68569]: DEBUG oslo_concurrency.lockutils [req-0da6635b-4a99-4b66-8a2d-a15ee19abf82 req-473aa256-ddec-44ca-8b0c-07a1522f8627 service nova] Releasing lock "refresh_cache-77b9756e-2299-47e2-a6d8-e8026e33a3de" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.619191] env[68569]: DEBUG nova.compute.manager [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Received event network-vif-plugged-44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 725.619363] env[68569]: DEBUG oslo_concurrency.lockutils [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] Acquiring lock "0c4d4d93-89bf-4164-973b-af48278a3915-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.619553] env[68569]: DEBUG oslo_concurrency.lockutils [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] Lock "0c4d4d93-89bf-4164-973b-af48278a3915-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.619730] env[68569]: DEBUG oslo_concurrency.lockutils [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] Lock "0c4d4d93-89bf-4164-973b-af48278a3915-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.619889] env[68569]: DEBUG nova.compute.manager [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] No waiting events found dispatching network-vif-plugged-44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 725.620075] env[68569]: WARNING nova.compute.manager [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Received unexpected event network-vif-plugged-44f67a03-e572-481c-bda6-954144ca8539 for instance with vm_state building and task_state spawning. [ 725.620233] env[68569]: DEBUG nova.compute.manager [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Received event network-changed-44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 725.620379] env[68569]: DEBUG nova.compute.manager [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Refreshing instance network info cache due to event network-changed-44f67a03-e572-481c-bda6-954144ca8539. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 725.620555] env[68569]: DEBUG oslo_concurrency.lockutils [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] Acquiring lock "refresh_cache-0c4d4d93-89bf-4164-973b-af48278a3915" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.620685] env[68569]: DEBUG oslo_concurrency.lockutils [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] Acquired lock "refresh_cache-0c4d4d93-89bf-4164-973b-af48278a3915" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.620833] env[68569]: DEBUG nova.network.neutron [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Refreshing network info cache for port 44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.802294] env[68569]: DEBUG nova.compute.utils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 725.803807] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 725.804026] env[68569]: DEBUG nova.network.neutron [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.843529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cc36c07-5418-4a7f-8b1a-c7b5bdefbb49 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.613s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.856111] env[68569]: DEBUG nova.policy [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73b1c309d1494888945f033a8c5140a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa0ab47201c64b0d87480d4ff90014f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.906701] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "refresh_cache-0c4d4d93-89bf-4164-973b-af48278a3915" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.950255] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fb7434-98fb-1d54-e92d-67d668b589af, 'name': SearchDatastore_Task, 'duration_secs': 0.009804} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.951085] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17c7b147-5980-4dc3-9d7c-88d3d26e4207 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.958546] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 725.958546] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5202bbb5-a322-2cf9-082b-80b739c7a7f4" [ 725.958546] env[68569]: _type = "Task" [ 725.958546] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.966688] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5202bbb5-a322-2cf9-082b-80b739c7a7f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.174432] env[68569]: DEBUG nova.network.neutron [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.276535] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d84b8e8-6507-4715-b866-1b3bc1da45ae tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.334s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.310415] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 726.335290] env[68569]: DEBUG nova.network.neutron [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.346344] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.382113] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "e77cc179-1f3d-4095-a491-48df7f79bdb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.382325] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.382533] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "e77cc179-1f3d-4095-a491-48df7f79bdb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.383103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.383103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.385172] env[68569]: INFO nova.compute.manager [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Terminating instance [ 726.388511] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efa8d6d-e5a4-491d-bbfb-a5753946181c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.399311] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e354279-7a0f-45e9-b1d9-a0fe56a7cee0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.435972] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183e74bb-9daf-4fc0-8f5f-cdd37122cab9 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.443505] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d45a093-3cfd-400c-92f0-dc2b77c55e46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.448850] env[68569]: DEBUG nova.network.neutron [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Successfully created port: 4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.460445] env[68569]: DEBUG nova.compute.provider_tree [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.470503] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5202bbb5-a322-2cf9-082b-80b739c7a7f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010602} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.470797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.471120] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 77b9756e-2299-47e2-a6d8-e8026e33a3de/77b9756e-2299-47e2-a6d8-e8026e33a3de.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 726.471656] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22583f93-c4f4-4788-9846-d75b359ced0b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.478530] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 726.478530] env[68569]: value = "task-3166783" [ 726.478530] env[68569]: _type = "Task" [ 726.478530] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.486988] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.528920] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52763392-36c1-9e2c-1f61-7a8c4b02ac24/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 726.529864] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680b5a32-8679-4f8b-9599-a486431ac30f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.536138] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52763392-36c1-9e2c-1f61-7a8c4b02ac24/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 726.536387] env[68569]: ERROR oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52763392-36c1-9e2c-1f61-7a8c4b02ac24/disk-0.vmdk due to incomplete transfer. [ 726.538687] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3ffef22d-1ccc-4a98-951e-7c048a20354f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.547215] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52763392-36c1-9e2c-1f61-7a8c4b02ac24/disk-0.vmdk. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 726.547215] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Uploaded image 715d763c-7d6b-40b5-8da3-e87ce93cb79e to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 726.547215] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 726.547842] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-44394fd2-227a-4b68-8447-f6dbbf76b9ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.553333] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 726.553333] env[68569]: value = "task-3166784" [ 726.553333] env[68569]: _type = "Task" [ 726.553333] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.561448] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166784, 'name': Destroy_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.839214] env[68569]: DEBUG oslo_concurrency.lockutils [req-46a1cb9a-b919-4070-abbf-0cdde8783ef4 req-12d3d9af-91f6-4c0b-a1e2-699bf3b47791 service nova] Releasing lock "refresh_cache-0c4d4d93-89bf-4164-973b-af48278a3915" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.839214] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "refresh_cache-0c4d4d93-89bf-4164-973b-af48278a3915" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.839214] env[68569]: DEBUG nova.network.neutron [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.871535] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.895656] env[68569]: DEBUG nova.compute.manager [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 726.896383] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.897234] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b18c432-37b0-4878-9514-31c81d54ce40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.910112] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 726.910545] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-077d2911-b375-437b-82f7-e9411a457702 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.921365] env[68569]: DEBUG oslo_vmware.api [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 726.921365] env[68569]: value = "task-3166785" [ 726.921365] env[68569]: _type = "Task" [ 726.921365] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.930454] env[68569]: DEBUG oslo_vmware.api [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166785, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.966434] env[68569]: DEBUG nova.scheduler.client.report [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.991027] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501831} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.992254] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 77b9756e-2299-47e2-a6d8-e8026e33a3de/77b9756e-2299-47e2-a6d8-e8026e33a3de.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.992723] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.993296] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-000fc669-44d6-4792-a287-29caff85df0a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.002537] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 727.002537] env[68569]: value = "task-3166786" [ 727.002537] env[68569]: _type = "Task" [ 727.002537] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.014338] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166786, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.066852] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166784, 'name': Destroy_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.319562] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 727.347520] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 727.347806] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.347992] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 727.348236] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.348406] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 727.348624] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 727.348993] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 727.349548] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 727.349548] env[68569]: DEBUG nova.virt.hardware [None 
req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 727.349548] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 727.349756] env[68569]: DEBUG nova.virt.hardware [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 727.350533] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5449af-fb09-424f-bd00-621d04130db5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.359567] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79458b0-594d-4a40-87e5-b12f54d7a233 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.381776] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "53cc8dbd-c163-403a-9286-e1f8ad939f94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.382021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.402506] env[68569]: DEBUG nova.network.neutron [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.432192] env[68569]: DEBUG oslo_vmware.api [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166785, 'name': PowerOffVM_Task, 'duration_secs': 0.224696} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.432804] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 727.432804] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 727.432939] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd942695-b5ed-4329-8e22-c2c466a23033 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.472687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.174s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.475284] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.430s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.477159] env[68569]: INFO nova.compute.claims [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.492802] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 727.493215] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 727.493215] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Deleting the datastore file [datastore1] e77cc179-1f3d-4095-a491-48df7f79bdb9 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 727.493473] env[68569]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c2e49cb-44b4-485b-a880-700ad22ea5bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.496778] env[68569]: INFO nova.scheduler.client.report [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Deleted allocations for instance 3ee3365b-0799-414b-b2a1-1d219bd9db96 [ 727.508382] env[68569]: DEBUG oslo_vmware.api [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 727.508382] env[68569]: value = "task-3166788" [ 727.508382] env[68569]: _type = "Task" [ 727.508382] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.518030] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166786, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071704} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.518030] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.518030] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613ae5b0-5bfa-442f-9358-5f883eaf826b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.529093] env[68569]: DEBUG oslo_vmware.api [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.549723] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 77b9756e-2299-47e2-a6d8-e8026e33a3de/77b9756e-2299-47e2-a6d8-e8026e33a3de.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.550658] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c7f97e4-a0fc-423a-a42b-50e3a6e35a16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.577197] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166784, 'name': Destroy_Task, 'duration_secs': 0.541924} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.578613] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Destroyed the VM [ 727.578944] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 727.579313] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 727.579313] env[68569]: value = "task-3166789" [ 727.579313] env[68569]: _type = "Task" [ 727.579313] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.579569] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9f12fdef-67cb-45ab-b50a-bc9a4ba1b670 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.592056] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166789, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.594039] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 727.594039] env[68569]: value = "task-3166790" [ 727.594039] env[68569]: _type = "Task" [ 727.594039] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.601518] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166790, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.609681] env[68569]: DEBUG nova.network.neutron [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Updating instance_info_cache with network_info: [{"id": "44f67a03-e572-481c-bda6-954144ca8539", "address": "fa:16:3e:76:fe:2f", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f67a03-e5", "ovs_interfaceid": "44f67a03-e572-481c-bda6-954144ca8539", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.989079] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "cc5139e1-4601-4966-9224-25b8223b8a57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.989346] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "cc5139e1-4601-4966-9224-25b8223b8a57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.999577] env[68569]: DEBUG nova.compute.manager [req-17b1c62c-b2c5-4182-8759-602b4519f071 req-f4dfc9e7-ec2f-435b-a802-7455a07e058e service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Received event network-vif-plugged-4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 727.999808] env[68569]: DEBUG oslo_concurrency.lockutils [req-17b1c62c-b2c5-4182-8759-602b4519f071 req-f4dfc9e7-ec2f-435b-a802-7455a07e058e service nova] Acquiring lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.000123] env[68569]: DEBUG oslo_concurrency.lockutils [req-17b1c62c-b2c5-4182-8759-602b4519f071 req-f4dfc9e7-ec2f-435b-a802-7455a07e058e service nova] Lock 
"a9e87dfc-6e00-4e55-8a8f-bc3174b991da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.000238] env[68569]: DEBUG oslo_concurrency.lockutils [req-17b1c62c-b2c5-4182-8759-602b4519f071 req-f4dfc9e7-ec2f-435b-a802-7455a07e058e service nova] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.000405] env[68569]: DEBUG nova.compute.manager [req-17b1c62c-b2c5-4182-8759-602b4519f071 req-f4dfc9e7-ec2f-435b-a802-7455a07e058e service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] No waiting events found dispatching network-vif-plugged-4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 728.000569] env[68569]: WARNING nova.compute.manager [req-17b1c62c-b2c5-4182-8759-602b4519f071 req-f4dfc9e7-ec2f-435b-a802-7455a07e058e service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Received unexpected event network-vif-plugged-4711426c-2c79-4cc9-8144-86c583dd1fc2 for instance with vm_state building and task_state spawning. [ 728.008015] env[68569]: DEBUG oslo_concurrency.lockutils [None req-eacd9f00-2fcd-491b-9067-346342d8a743 tempest-ServersAdminNegativeTestJSON-810068084 tempest-ServersAdminNegativeTestJSON-810068084-project-member] Lock "3ee3365b-0799-414b-b2a1-1d219bd9db96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.164s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.018593] env[68569]: DEBUG oslo_vmware.api [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160887} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.018875] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 728.019122] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 728.019345] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.019559] env[68569]: INFO nova.compute.manager [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 728.019839] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.020097] env[68569]: DEBUG nova.compute.manager [-] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 728.020231] env[68569]: DEBUG nova.network.neutron [-] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.090839] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166789, 'name': ReconfigVM_Task, 'duration_secs': 0.334692} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.091204] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 77b9756e-2299-47e2-a6d8-e8026e33a3de/77b9756e-2299-47e2-a6d8-e8026e33a3de.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.092157] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-feb8b4a2-6b74-49a7-a069-e01d75a24b10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.104594] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166790, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.104594] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 728.104594] env[68569]: value = "task-3166791" [ 728.104594] env[68569]: _type = "Task" [ 728.104594] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.110969] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166791, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.112494] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "refresh_cache-0c4d4d93-89bf-4164-973b-af48278a3915" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.112785] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Instance network_info: |[{"id": "44f67a03-e572-481c-bda6-954144ca8539", "address": "fa:16:3e:76:fe:2f", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f67a03-e5", "ovs_interfaceid": "44f67a03-e572-481c-bda6-954144ca8539", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 728.113352] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:fe:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44f67a03-e572-481c-bda6-954144ca8539', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 728.120983] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.121212] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 728.121430] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b541646-03a4-49c1-bbd3-b6dc6f828a69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.141725] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 728.141725] env[68569]: value = "task-3166792" [ 728.141725] env[68569]: _type = "Task" [ 728.141725] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.150093] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166792, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.199572] env[68569]: DEBUG nova.network.neutron [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Successfully updated port: 4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 728.604381] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166790, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.613253] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166791, 'name': Rename_Task, 'duration_secs': 0.24503} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.615610] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 728.615959] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7500dbb8-123c-40d3-b80c-ceee0789f212 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.622584] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 728.622584] env[68569]: value = "task-3166793" [ 728.622584] env[68569]: _type = "Task" [ 728.622584] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.634912] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166793, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.652393] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166792, 'name': CreateVM_Task, 'duration_secs': 0.490054} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.652560] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.653287] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.653442] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.654264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 728.658217] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e417057f-760f-4719-9370-f3d3cd452a7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.664293] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 728.664293] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e7195e-cc83-9554-7f6b-d52c42ea4ad4" [ 728.664293] env[68569]: _type = "Task" [ 728.664293] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.673254] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e7195e-cc83-9554-7f6b-d52c42ea4ad4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.705597] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.705804] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.705959] env[68569]: DEBUG nova.network.neutron [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.104972] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166790, 'name': RemoveSnapshot_Task} progress is 45%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.106234] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d952bb3-e63e-47bb-aecc-498dc7325182 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.112919] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0dd48a-8e50-4dc6-8201-968df24d77e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.148509] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b809eaa7-810f-408d-94fb-f6a0cfb81f02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.156936] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166793, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.160260] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bb0717-d29a-4f04-927e-da7d673f83bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.172811] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e7195e-cc83-9554-7f6b-d52c42ea4ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.018957} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.182105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.182459] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.182613] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.182759] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.182937] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.183475] env[68569]: DEBUG nova.compute.provider_tree [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.187028] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac10ef28-96d2-44ac-a866-7d823a04c367 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.194163] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.194360] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.197182] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09855958-8fb4-4e82-8b70-126910bc3123 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.199248] env[68569]: DEBUG nova.network.neutron [-] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.204202] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 729.204202] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f18f93-3649-6e6f-c670-321972ba24b4" [ 729.204202] env[68569]: _type = "Task" [ 729.204202] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.217478] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f18f93-3649-6e6f-c670-321972ba24b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.253628] env[68569]: DEBUG nova.network.neutron [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.469043] env[68569]: DEBUG nova.network.neutron [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.605666] env[68569]: DEBUG oslo_vmware.api [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166790, 'name': RemoveSnapshot_Task, 'duration_secs': 1.812877} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.606158] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 729.606223] env[68569]: INFO nova.compute.manager [None req-c1fe8f85-bfc9-4ed4-af3a-579cd4d6966e tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Took 16.17 seconds to snapshot the instance on the hypervisor. [ 729.654209] env[68569]: DEBUG oslo_vmware.api [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166793, 'name': PowerOnVM_Task, 'duration_secs': 0.731417} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.654209] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 729.654209] env[68569]: INFO nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Took 7.81 seconds to spawn the instance on the hypervisor. [ 729.654327] env[68569]: DEBUG nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 729.655070] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6919e782-61a9-45e6-a859-bf616d3238d8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.691018] env[68569]: DEBUG nova.scheduler.client.report [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.701756] env[68569]: INFO nova.compute.manager [-] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Took 1.68 seconds to deallocate network for instance. [ 729.719131] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f18f93-3649-6e6f-c670-321972ba24b4, 'name': SearchDatastore_Task, 'duration_secs': 0.015668} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.719881] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-791c72ea-832b-43bd-acce-00d14bfc6ecd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.726209] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 729.726209] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52859206-1ca1-5c2e-8e74-d3c8a85f4793" [ 729.726209] env[68569]: _type = "Task" [ 729.726209] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.735188] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52859206-1ca1-5c2e-8e74-d3c8a85f4793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.972339] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.972689] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Instance network_info: |[{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 729.973130] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:9a:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4711426c-2c79-4cc9-8144-86c583dd1fc2', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.980647] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating folder: Project (aa0ab47201c64b0d87480d4ff90014f5). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.980972] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a6982cd-b724-46f6-8fc1-c8016c317d3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.992366] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created folder: Project (aa0ab47201c64b0d87480d4ff90014f5) in parent group-v633430. [ 729.992560] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating folder: Instances. Parent ref: group-v633546. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.992796] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34663f7b-8975-46e7-9103-96d206b6c5fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.001764] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created folder: Instances in parent group-v633546. [ 730.001994] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.002192] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.002402] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c647910-ae65-4ed8-8bd7-2b5e53545bd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.022046] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.022046] env[68569]: value = "task-3166796" [ 730.022046] env[68569]: _type = "Task" [ 730.022046] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.029322] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166796, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.080147] env[68569]: DEBUG nova.compute.manager [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Received event network-changed-4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 730.080147] env[68569]: DEBUG nova.compute.manager [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Refreshing instance network info cache due to event network-changed-4711426c-2c79-4cc9-8144-86c583dd1fc2. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 730.080147] env[68569]: DEBUG oslo_concurrency.lockutils [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] Acquiring lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.080147] env[68569]: DEBUG oslo_concurrency.lockutils [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] Acquired lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.082723] env[68569]: DEBUG nova.network.neutron [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Refreshing network info cache for port 4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.173277] env[68569]: INFO nova.compute.manager [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Took 42.92 seconds to build instance. [ 730.193617] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.718s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.194136] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 730.196509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 32.526s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.214467] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.237559] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52859206-1ca1-5c2e-8e74-d3c8a85f4793, 'name': SearchDatastore_Task, 'duration_secs': 0.02626} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.238559] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.241015] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 0c4d4d93-89bf-4164-973b-af48278a3915/0c4d4d93-89bf-4164-973b-af48278a3915.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.241015] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d768708-82c5-4933-8c74-2dc2971692ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.246848] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 730.246848] env[68569]: value = "task-3166797" [ 730.246848] env[68569]: _type = "Task" [ 730.246848] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.257285] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.532939] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166796, 'name': CreateVM_Task, 'duration_secs': 0.330661} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.533140] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.533863] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.534051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.534383] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 730.534676] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50872981-b12d-4c71-93de-f7f5dbdc23c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.539639] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 730.539639] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52609d63-b494-e697-5b1a-7aa4a4000eaf" [ 730.539639] env[68569]: _type = "Task" [ 730.539639] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.548331] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52609d63-b494-e697-5b1a-7aa4a4000eaf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.675251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f13386b2-7157-46a4-8d38-299dfd0b695b tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.640s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.711871] env[68569]: DEBUG nova.compute.utils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 730.717025] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 730.717025] env[68569]: DEBUG nova.network.neutron [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.759184] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166797, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.885207] env[68569]: DEBUG nova.policy [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77016f26e6584b69b41fbde6e7c8d4a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e26b4c60768444592095acce50e7944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.925255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.925510] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.028078] env[68569]: DEBUG nova.network.neutron [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updated VIF entry in instance network info cache for port 4711426c-2c79-4cc9-8144-86c583dd1fc2. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 731.028474] env[68569]: DEBUG nova.network.neutron [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.050578] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52609d63-b494-e697-5b1a-7aa4a4000eaf, 'name': SearchDatastore_Task, 'duration_secs': 0.010785} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.050857] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.051092] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.051419] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.051538] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.051719] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.052456] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c39fd7a-2ddb-48bb-b63e-0bdcefec7e68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.062629] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.062629] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.063065] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0421a1c2-e0e4-49e0-aa57-ae4581ad0e54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.067553] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 731.067553] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52753090-e648-3a92-a9c1-0a065bec8655" [ 731.067553] env[68569]: _type = "Task" [ 731.067553] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.077314] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52753090-e648-3a92-a9c1-0a065bec8655, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.179770] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 731.218794] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Applying migration context for instance 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 as it has an incoming, in-progress migration d07aff0d-55b9-4193-83cd-8ad96b7a389d. Migration status is error {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 731.221084] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=68569) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 731.221245] env[68569]: INFO nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Updating resource usage from migration aab3fe80-9a23-44ba-bdb2-b6166fdccf6c [ 731.223804] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 731.253396] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.253552] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 98d5c760-6da3-49e3-af47-20a8054971f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255025] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fb97d2dd-d42a-42e8-9a36-5c913a58b891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255025] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance e77cc179-1f3d-4095-a491-48df7f79bdb9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 731.255025] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255025] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ad207187-634f-4e7f-9809-eb3f742ddeec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255299] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 39a84212-2e52-4dba-b00c-5689564deaf4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255299] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c56e4282-b1ca-42f5-b346-692779475df0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255299] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255299] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c9264123-ab19-40d5-959a-791b8966d2f6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 731.255408] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance b2e6de60-b4e5-4030-bca7-355d17fec06d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255408] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 2cde3729-1be6-42c5-891f-42a7a8bff267 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255408] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance bda7e09b-848b-4d5d-a49d-6e0639f22f99 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 731.255408] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7c887df0-4358-46c5-9682-0d4122e96d10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.255408] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ec64b2fd-2409-4af1-8f51-cc0ccbba14f2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 731.256216] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 78f486aa-80f4-4d43-bd00-cc6206517a72 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 731.256389] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6606b921-4f3a-44f5-ae4e-c600f26876fc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
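The resource-tracker records above show one decision being made per instance: instances still tracked on this host keep their placement allocations, while allocations that reference the host for instances it no longer manages are only warned about and skipped. The following is an illustrative sketch of that audit loop, not Nova's actual implementation; the function name and data shapes are assumptions, with the resource dicts copied from the log entries above.

# Illustrative sketch only -- not Nova's resource tracker code. It mirrors the
# decision logged above: tracked instances keep their allocations, untracked
# ones are flagged and left alone ("Skipping heal of allocation").
def audit_allocations(tracked_instance_uuids, placement_allocations):
    """Return (kept, skipped) allocation maps keyed by instance UUID."""
    kept, skipped = {}, {}
    for uuid, resources in placement_allocations.items():
        if uuid in tracked_instance_uuids:
            kept[uuid] = resources      # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}
        else:
            # We do not know whether the allocation belongs to a migration or a
            # leaked record, so it is reported but not healed.
            skipped[uuid] = resources
    return kept, skipped

if __name__ == "__main__":
    tracked = {"77b9756e-2299-47e2-a6d8-e8026e33a3de"}
    allocations = {
        "77b9756e-2299-47e2-a6d8-e8026e33a3de": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
        "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    }
    kept, skipped = audit_allocations(tracked, allocations)
    print(len(kept), "kept;", len(skipped), "skipped")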
[ 731.256517] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 731.256633] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance de2b0206-0c73-4275-89ff-37199520dd71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.256745] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance eec09a1c-e8b2-4b6a-9545-e190e1f965d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.256854] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 77b9756e-2299-47e2-a6d8-e8026e33a3de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.256961] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 0c4d4d93-89bf-4164-973b-af48278a3915 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.257083] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.257192] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 16b6fafe-524d-482f-961b-10e3601ac4c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.261984] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166797, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722559} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.262436] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 0c4d4d93-89bf-4164-973b-af48278a3915/0c4d4d93-89bf-4164-973b-af48278a3915.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.264047] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.264047] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3621f6a3-f975-43bf-818b-bff978762ad9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.272382] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 731.272382] env[68569]: value = "task-3166798" [ 731.272382] env[68569]: _type = "Task" [ 731.272382] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.281697] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166798, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.394855] env[68569]: DEBUG nova.network.neutron [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Successfully created port: c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.429228] env[68569]: DEBUG nova.compute.utils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.531252] env[68569]: DEBUG oslo_concurrency.lockutils [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] Releasing lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.531530] env[68569]: DEBUG nova.compute.manager [req-01cc2238-54ec-46c7-ad0f-8b9b66feac72 req-30f59d31-856b-40cf-b94a-9973bc7d6091 service nova] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Received event network-vif-deleted-b7d8aded-6354-4a88-941e-005173bada5f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 731.537732] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "7c887df0-4358-46c5-9682-0d4122e96d10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.537732] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "7c887df0-4358-46c5-9682-0d4122e96d10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.537732] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "7c887df0-4358-46c5-9682-0d4122e96d10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.537962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "7c887df0-4358-46c5-9682-0d4122e96d10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.537962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] 
Lock "7c887df0-4358-46c5-9682-0d4122e96d10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.540229] env[68569]: INFO nova.compute.manager [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Terminating instance [ 731.579469] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52753090-e648-3a92-a9c1-0a065bec8655, 'name': SearchDatastore_Task, 'duration_secs': 0.019113} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.580315] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-820d17ad-f25c-4324-ad94-e6473c57ffde {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.586058] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 731.586058] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52334021-5524-61e7-643b-79115cfa37a8" [ 731.586058] env[68569]: _type = "Task" [ 731.586058] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.595489] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52334021-5524-61e7-643b-79115cfa37a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.702712] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.764440] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.782537] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166798, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15218} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.782832] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.783749] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dfba75-d6af-4814-8b79-af9b40c5da36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.810444] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 0c4d4d93-89bf-4164-973b-af48278a3915/0c4d4d93-89bf-4164-973b-af48278a3915.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.810444] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23f36511-16a7-4fa1-8113-202767b96f34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.831159] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 731.831159] env[68569]: value = "task-3166799" [ 731.831159] env[68569]: _type = "Task" [ 731.831159] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.841484] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166799, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.925536] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "77b9756e-2299-47e2-a6d8-e8026e33a3de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.925786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.926010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "77b9756e-2299-47e2-a6d8-e8026e33a3de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.926231] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.926419] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.929508] env[68569]: INFO nova.compute.manager [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Terminating instance [ 731.934103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.050015] env[68569]: DEBUG nova.compute.manager [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 732.050325] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.051943] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c361937-09b1-4b99-8274-f0913cbf22fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.064755] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 732.065067] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc450fa2-fa31-4fb6-b8bb-87376f03fdbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.071914] env[68569]: DEBUG oslo_vmware.api [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 732.071914] env[68569]: value = "task-3166800" [ 732.071914] env[68569]: _type = "Task" [ 732.071914] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.080964] env[68569]: DEBUG oslo_vmware.api [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.096694] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52334021-5524-61e7-643b-79115cfa37a8, 'name': SearchDatastore_Task, 'duration_secs': 0.031352} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.097841] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.097841] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] a9e87dfc-6e00-4e55-8a8f-bc3174b991da/a9e87dfc-6e00-4e55-8a8f-bc3174b991da.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.097841] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55f5c09e-16ff-45c9-a324-0efc5964f74b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.104367] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 732.104367] env[68569]: value = "task-3166801" [ 732.104367] env[68569]: _type = "Task" [ 732.104367] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.112620] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166801, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.233864] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 732.264489] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.264731] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.264886] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.266203] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.266434] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.266768] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.268149] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.268149] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.268425] env[68569]: DEBUG 
nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.268748] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.269036] env[68569]: DEBUG nova.virt.hardware [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 732.269995] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 398dd3c7-c630-4a29-b204-80f6fb394ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 732.276295] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78a2ac1-e13b-4f41-8ffc-f2dcc1ecf530 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.282505] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb38d02c-a5e3-4e2b-af44-fe11c19675c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.343631] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166799, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.435077] env[68569]: DEBUG nova.compute.manager [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 732.435387] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.436780] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc23ecf-aff1-40e7-befc-f6be3e4c36cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.445578] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 732.445916] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e3b5fcb-e419-4936-8748-f87141d2c11b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.454077] env[68569]: DEBUG oslo_vmware.api [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 732.454077] env[68569]: value = "task-3166802" [ 732.454077] env[68569]: _type = "Task" [ 732.454077] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.465033] env[68569]: DEBUG oslo_vmware.api [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.585810] env[68569]: DEBUG oslo_vmware.api [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166800, 'name': PowerOffVM_Task, 'duration_secs': 0.245805} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.586482] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 732.586482] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 732.586693] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92556b0c-2f43-4be6-ab2f-42f3d37b0f2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.616324] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166801, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.665061] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 732.665429] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 732.665776] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Deleting the datastore file [datastore2] 7c887df0-4358-46c5-9682-0d4122e96d10 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 732.666469] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a734d0c6-0033-4175-9ed8-24c0202eafac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.674169] env[68569]: DEBUG oslo_vmware.api [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for the task: (returnval){ [ 732.674169] env[68569]: value = "task-3166804" [ 732.674169] env[68569]: _type = "Task" [ 732.674169] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.689200] env[68569]: DEBUG oslo_vmware.api [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166804, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.777599] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance b40c9dec-cebc-4d23-8df4-96e804333706 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 732.844419] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166799, 'name': ReconfigVM_Task, 'duration_secs': 0.541279} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.844419] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 0c4d4d93-89bf-4164-973b-af48278a3915/0c4d4d93-89bf-4164-973b-af48278a3915.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.844419] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52e3ee1b-4d9d-4208-ae40-e03417c375b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.849024] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 732.849024] env[68569]: value = "task-3166805" [ 732.849024] env[68569]: _type = "Task" [ 732.849024] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.859439] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166805, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.963367] env[68569]: DEBUG oslo_vmware.api [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166802, 'name': PowerOffVM_Task, 'duration_secs': 0.402113} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.963644] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 732.964413] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 732.964413] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7da12a3-28d1-4350-9762-a5b9bdc40d5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.026942] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.027264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.027503] env[68569]: INFO nova.compute.manager [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Attaching volume f2b113a5-31d1-4fec-9883-264bb0b69860 to /dev/sdb [ 733.030725] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 733.030892] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 733.031086] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Deleting the datastore file [datastore2] 77b9756e-2299-47e2-a6d8-e8026e33a3de {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 733.031355] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-4909762a-795f-4e34-a572-67ba31a20073 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.043413] env[68569]: DEBUG oslo_vmware.api [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for the task: (returnval){ [ 733.043413] env[68569]: value = "task-3166807" [ 733.043413] env[68569]: _type = "Task" [ 733.043413] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.056238] env[68569]: DEBUG oslo_vmware.api [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.072988] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8b293b-cf72-4a34-b303-15d0dc4b36ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.081491] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68252547-6b15-42b2-b606-b6ed08f4e2a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.094150] env[68569]: DEBUG nova.virt.block_device [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updating existing volume attachment record: 7c94c39d-277f-47c3-beb4-8ab8b9a687da {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 733.117020] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166801, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645077} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.117020] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] a9e87dfc-6e00-4e55-8a8f-bc3174b991da/a9e87dfc-6e00-4e55-8a8f-bc3174b991da.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 733.117020] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.117020] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f198f67d-4c93-4685-9b4c-efccd975d028 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.121351] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 733.121351] env[68569]: value = "task-3166808" [ 733.121351] env[68569]: _type = "Task" [ 733.121351] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.131334] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166808, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.184225] env[68569]: DEBUG oslo_vmware.api [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Task: {'id': task-3166804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265676} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.184546] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 733.186399] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 733.186399] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 733.186399] env[68569]: INFO nova.compute.manager [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Took 1.13 seconds to destroy the instance on the hypervisor. [ 733.186399] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.186399] env[68569]: DEBUG nova.compute.manager [-] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 733.186399] env[68569]: DEBUG nova.network.neutron [-] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.259361] env[68569]: DEBUG nova.compute.manager [req-54d516d9-9f06-40a8-819f-569d38569b44 req-330fb8ca-8ff9-4913-8726-6c3ea160d4f6 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Received event network-vif-plugged-c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 733.259629] env[68569]: DEBUG oslo_concurrency.lockutils [req-54d516d9-9f06-40a8-819f-569d38569b44 req-330fb8ca-8ff9-4913-8726-6c3ea160d4f6 service nova] Acquiring lock "16b6fafe-524d-482f-961b-10e3601ac4c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.260026] env[68569]: DEBUG oslo_concurrency.lockutils [req-54d516d9-9f06-40a8-819f-569d38569b44 req-330fb8ca-8ff9-4913-8726-6c3ea160d4f6 service nova] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.260551] env[68569]: DEBUG oslo_concurrency.lockutils [req-54d516d9-9f06-40a8-819f-569d38569b44 req-330fb8ca-8ff9-4913-8726-6c3ea160d4f6 service nova] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.260782] env[68569]: DEBUG nova.compute.manager [req-54d516d9-9f06-40a8-819f-569d38569b44 req-330fb8ca-8ff9-4913-8726-6c3ea160d4f6 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] No waiting events found dispatching network-vif-plugged-c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 733.261090] env[68569]: WARNING nova.compute.manager [req-54d516d9-9f06-40a8-819f-569d38569b44 req-330fb8ca-8ff9-4913-8726-6c3ea160d4f6 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Received unexpected event network-vif-plugged-c60d4a60-c03c-4dcf-ad20-45e8627f3c8f for instance with vm_state building and task_state spawning. [ 733.281309] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 64146253-16ab-4d95-83c9-31b74014a040 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 733.360387] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166805, 'name': Rename_Task, 'duration_secs': 0.197659} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.360661] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.361071] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76f7e72a-3293-4894-abed-7f8399f50fb3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.367774] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 733.367774] env[68569]: value = "task-3166812" [ 733.367774] env[68569]: _type = "Task" [ 733.367774] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.376645] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.536564] env[68569]: DEBUG nova.network.neutron [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Successfully updated port: c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.559918] env[68569]: DEBUG oslo_vmware.api [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Task: {'id': task-3166807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2029} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.559918] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 733.560270] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 733.561109] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 733.561109] env[68569]: INFO nova.compute.manager [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Took 1.13 seconds to destroy the instance on the hypervisor. [ 733.561294] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.562276] env[68569]: DEBUG nova.compute.manager [-] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 733.562276] env[68569]: DEBUG nova.network.neutron [-] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.639782] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166808, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072038} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.640170] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.641015] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c41fcbc-3e72-43e8-8b47-0144de7360ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.665975] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] a9e87dfc-6e00-4e55-8a8f-bc3174b991da/a9e87dfc-6e00-4e55-8a8f-bc3174b991da.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.671019] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20078f7f-b792-4218-b9ee-67cf638a9875 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.691449] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 733.691449] env[68569]: value = "task-3166813" [ 733.691449] env[68569]: _type = "Task" [ 733.691449] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.785279] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c634f7eb-2f71-473d-8f90-71d74edffecb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 733.882330] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166812, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.039561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.039561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquired lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.040168] env[68569]: DEBUG nova.network.neutron [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.186617] env[68569]: DEBUG nova.network.neutron [-] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.202543] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166813, 'name': ReconfigVM_Task, 'duration_secs': 0.449972} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.204324] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Reconfigured VM instance instance-00000027 to attach disk [datastore2] a9e87dfc-6e00-4e55-8a8f-bc3174b991da/a9e87dfc-6e00-4e55-8a8f-bc3174b991da.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.206128] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b312790-4a69-46bc-9d0d-30f7bee4db1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.218125] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 734.218125] env[68569]: value = "task-3166814" [ 734.218125] env[68569]: _type = "Task" [ 734.218125] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.227255] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166814, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.288847] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.361885] env[68569]: DEBUG nova.network.neutron [-] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.379072] env[68569]: DEBUG oslo_vmware.api [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166812, 'name': PowerOnVM_Task, 'duration_secs': 0.89887} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.379248] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.380363] env[68569]: INFO nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Took 9.84 seconds to spawn the instance on the hypervisor. [ 734.380363] env[68569]: DEBUG nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 734.380798] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f26096-448b-4720-bb46-79885b99195a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.583722] env[68569]: DEBUG nova.network.neutron [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.690274] env[68569]: INFO nova.compute.manager [-] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Took 1.50 seconds to deallocate network for instance. [ 734.730653] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166814, 'name': Rename_Task, 'duration_secs': 0.209733} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.731009] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.731416] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ea05083-a211-4acc-b2c6-ccb878781e2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.738195] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 734.738195] env[68569]: value = "task-3166815" [ 734.738195] env[68569]: _type = "Task" [ 734.738195] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.746677] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166815, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.756832] env[68569]: DEBUG nova.network.neutron [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Updating instance_info_cache with network_info: [{"id": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "address": "fa:16:3e:7d:1c:98", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc60d4a60-c0", "ovs_interfaceid": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.792789] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 98efdafe-e02b-46ca-a701-b70042513128 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.864990] env[68569]: INFO nova.compute.manager [-] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Took 1.30 seconds to deallocate network for instance. [ 734.899820] env[68569]: INFO nova.compute.manager [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Took 47.53 seconds to build instance. [ 735.134856] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "9eafa273-097b-48ac-ae5f-4f7a469ac861" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.135531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.200294] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.248582] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166815, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.259309] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Releasing lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.262021] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Instance network_info: |[{"id": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "address": "fa:16:3e:7d:1c:98", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc60d4a60-c0", "ovs_interfaceid": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 735.262389] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:1c:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c60d4a60-c03c-4dcf-ad20-45e8627f3c8f', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.270101] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Creating folder: Project (8e26b4c60768444592095acce50e7944). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.270771] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72ef121a-15dc-4015-a6a3-94b6e9a3fd06 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.282193] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Created folder: Project (8e26b4c60768444592095acce50e7944) in parent group-v633430. 
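The ReconfigVM_Task / PowerOffVM_Task / CopyVirtualDisk_Task entries above all follow the same pattern: nova's vmwareapi driver starts an asynchronous vCenter task over SOAP (the oslo_vmware.service "Invoking ..." lines), then blocks in oslo_vmware.api wait_for_task while _poll_task logs progress percentages and, on completion, a final duration_secs; per-instance work is serialized with oslo_concurrency.lockutils, which produces the "Acquiring/acquired/released lock" lines with their waited/held timings. A minimal sketch of that pattern, assuming an already-created VMwareAPISession and a VM managed-object reference supplied by the caller; the function name, its arguments, and the choice of PowerOffVM_Task are illustrative assumptions, not nova's actual code:

# Illustrative sketch only. Only lockutils.lock, session.invoke_api,
# session.vim and session.wait_for_task are taken from the entry points
# visible in the log trailers above; everything else is assumed.
from oslo_concurrency import lockutils


def power_off_and_wait(session, vm_ref, instance_uuid):
    # Serialize per-instance work, as the "Acquiring lock ... by
    # nova.compute.manager..." lines do; lockutils logs the waited/held times.
    with lockutils.lock(instance_uuid):
        # Start the asynchronous vCenter task. The SOAP request itself is what
        # oslo_vmware.service logs as
        # "Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-...".
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # Block while oslo.vmware polls the task; each poll produces one
        # "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is N%"
        # line, and successful completion logs the final duration_secs.
        session.wait_for_task(task)

In this reading, the opID=oslo.vmware-<uuid> values are added per request by oslo.vmware's request handler and the task-NNNNNNN identifiers are returned by vCenter; the calling code only sees the blocking wait_for_task call.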
[ 735.282405] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Creating folder: Instances. Parent ref: group-v633552. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.282676] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2dea63f7-72fd-428c-8c34-89b8b5763ec3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.292079] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Created folder: Instances in parent group-v633552. [ 735.292431] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.292615] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.292830] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91b39d64-be6b-4a6d-b943-2ba4575b358e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.308573] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fd803a5e-8dbd-449e-b45d-1e6410a286e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.308757] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Migration aab3fe80-9a23-44ba-bdb2-b6166fdccf6c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 735.316737] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.316737] env[68569]: value = "task-3166818" [ 735.316737] env[68569]: _type = "Task" [ 735.316737] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.326529] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166818, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.372821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.446130] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98462af8-9c16-4fbd-ae3c-ed6bc5929ffb tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "0c4d4d93-89bf-4164-973b-af48278a3915" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.257s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.466674] env[68569]: DEBUG nova.compute.manager [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Received event network-changed-c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 735.466726] env[68569]: DEBUG nova.compute.manager [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Refreshing instance network info cache due to event network-changed-c60d4a60-c03c-4dcf-ad20-45e8627f3c8f. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 735.466956] env[68569]: DEBUG oslo_concurrency.lockutils [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] Acquiring lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.467138] env[68569]: DEBUG oslo_concurrency.lockutils [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] Acquired lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.467300] env[68569]: DEBUG nova.network.neutron [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Refreshing network info cache for port c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 735.751291] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166815, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.812902] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 492c0fa1-f821-496a-86c2-f7686479a733 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.812902] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 912303de-a79d-41b0-ab44-c79e850a4dee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 735.830108] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166818, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.948158] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 736.241132] env[68569]: DEBUG nova.network.neutron [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Updated VIF entry in instance network info cache for port c60d4a60-c03c-4dcf-ad20-45e8627f3c8f. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 736.241550] env[68569]: DEBUG nova.network.neutron [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Updating instance_info_cache with network_info: [{"id": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "address": "fa:16:3e:7d:1c:98", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc60d4a60-c0", "ovs_interfaceid": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.258120] env[68569]: DEBUG oslo_vmware.api [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3166815, 'name': PowerOnVM_Task, 'duration_secs': 1.383815} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.258394] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 736.258588] env[68569]: INFO nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Took 8.94 seconds to spawn the instance on the hypervisor. [ 736.258758] env[68569]: DEBUG nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 736.259731] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4847a01-4669-438c-8966-779b6ad32495 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.321381] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 736.334257] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166818, 'name': CreateVM_Task, 'duration_secs': 0.836276} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.336160] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 736.337037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.337383] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.337831] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 736.339121] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b23e7600-2d2a-49cb-a466-4bcbafc7f421 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.344090] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 736.344090] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d6beb0-80c0-5291-89ce-30f15cc5ac44" [ 736.344090] env[68569]: _type = "Task" [ 736.344090] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.352522] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d6beb0-80c0-5291-89ce-30f15cc5ac44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.469509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.752698] env[68569]: DEBUG oslo_concurrency.lockutils [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] Releasing lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.752996] env[68569]: DEBUG nova.compute.manager [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Received event network-vif-deleted-67421e24-0bc2-4e26-a6ac-551a8eb20275 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 736.753198] env[68569]: DEBUG nova.compute.manager [req-5c068909-1f4d-4be1-b01e-25794cbe4b2d req-1bf38976-e072-47c5-818d-c7d3ab6c0062 service nova] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Received event network-vif-deleted-2cb53b84-1f50-4d92-95dd-850b9abb24be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 736.781103] env[68569]: INFO nova.compute.manager [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Took 43.21 seconds to build instance. [ 736.828892] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 50abc994-682a-40d6-ae77-601839b98793 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 736.855959] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d6beb0-80c0-5291-89ce-30f15cc5ac44, 'name': SearchDatastore_Task, 'duration_secs': 0.013486} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.856308] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.856660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 736.856939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.857080] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.857246] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 736.857505] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68d61ac8-fce8-4fd0-b81b-4efedde660d8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.866533] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 736.866720] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 736.867741] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd3282da-3aa3-4b59-903b-81e6b64d1d15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.874350] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 736.874350] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52476981-0f62-24ac-0532-ad85ca949d44" [ 736.874350] env[68569]: _type = "Task" [ 736.874350] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.883017] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52476981-0f62-24ac-0532-ad85ca949d44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.282189] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fd1cd90a-e6ba-4d46-b7b6-74f54f931137 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.710s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.334760] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 53cc8dbd-c163-403a-9286-e1f8ad939f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.385789] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52476981-0f62-24ac-0532-ad85ca949d44, 'name': SearchDatastore_Task, 'duration_secs': 0.009179} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.388023] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3810ca-4574-4bf2-b922-aa25b3ee3205 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.393056] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 737.393056] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524cb1d3-7525-f4b5-c7aa-075aeeb70ce6" [ 737.393056] env[68569]: _type = "Task" [ 737.393056] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.400839] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524cb1d3-7525-f4b5-c7aa-075aeeb70ce6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.506440] env[68569]: DEBUG nova.compute.manager [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Received event network-changed-4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 737.506564] env[68569]: DEBUG nova.compute.manager [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Refreshing instance network info cache due to event network-changed-4711426c-2c79-4cc9-8144-86c583dd1fc2. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 737.506787] env[68569]: DEBUG oslo_concurrency.lockutils [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] Acquiring lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.506891] env[68569]: DEBUG oslo_concurrency.lockutils [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] Acquired lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.507113] env[68569]: DEBUG nova.network.neutron [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Refreshing network info cache for port 4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.652861] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 737.653161] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633551', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'name': 'volume-f2b113a5-31d1-4fec-9883-264bb0b69860', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50b9775c-ddbd-4e8f-a2b8-b08c3028fc28', 'attached_at': '', 'detached_at': '', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'serial': 'f2b113a5-31d1-4fec-9883-264bb0b69860'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 737.654121] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2a25bc-6bb7-4a47-a361-0a400d5cda82 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.671442] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec45059-fcaa-4c9c-8900-7cf24743e222 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.697446] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] volume-f2b113a5-31d1-4fec-9883-264bb0b69860/volume-f2b113a5-31d1-4fec-9883-264bb0b69860.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.697803] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dd25963-895e-4d5e-aa69-551dc3fb0e31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.715463] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Waiting for the task: (returnval){ [ 737.715463] env[68569]: value = "task-3166820" [ 737.715463] env[68569]: _type = "Task" [ 737.715463] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.723146] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166820, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.787857] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.839072] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance cc5139e1-4601-4966-9224-25b8223b8a57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 737.839072] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 737.839072] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 737.907135] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524cb1d3-7525-f4b5-c7aa-075aeeb70ce6, 'name': SearchDatastore_Task, 'duration_secs': 0.009256} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.907135] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.907135] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 16b6fafe-524d-482f-961b-10e3601ac4c2/16b6fafe-524d-482f-961b-10e3601ac4c2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.907135] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32e87b25-aba2-4aeb-9198-80096a9b77e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.913138] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 737.913138] env[68569]: value = "task-3166821" [ 737.913138] env[68569]: _type = "Task" [ 737.913138] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.923832] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.035358] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "de2b0206-0c73-4275-89ff-37199520dd71" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.035578] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.035778] env[68569]: DEBUG nova.compute.manager [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.036636] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d86bd77-edd0-4c54-b03a-c930b8db5224 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.045412] env[68569]: DEBUG nova.compute.manager [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 738.045944] env[68569]: DEBUG nova.objects.instance [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lazy-loading 'flavor' on Instance uuid de2b0206-0c73-4275-89ff-37199520dd71 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 738.227780] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.318676] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.349142] env[68569]: DEBUG nova.network.neutron [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updated VIF entry in instance network info cache for port 4711426c-2c79-4cc9-8144-86c583dd1fc2. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 738.349533] env[68569]: DEBUG nova.network.neutron [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.422725] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472104} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.424700] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 16b6fafe-524d-482f-961b-10e3601ac4c2/16b6fafe-524d-482f-961b-10e3601ac4c2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.424913] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.425332] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f235da24-e7f2-4347-b78b-55b5530b2f9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.431194] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 738.431194] env[68569]: value = "task-3166822" [ 738.431194] env[68569]: _type = "Task" [ 738.431194] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.438340] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166822, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.464045] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9926ada1-63ed-48e9-ad6e-2a74dc30b653 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.470814] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bdd343-2c2f-46a6-b2d0-25788578982c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.500325] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquiring lock "2cde3729-1be6-42c5-891f-42a7a8bff267" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.500325] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.500603] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquiring lock "2cde3729-1be6-42c5-891f-42a7a8bff267-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.500680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.500839] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.503562] env[68569]: INFO nova.compute.manager [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Terminating instance [ 738.505351] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5130b766-fae8-41d7-91b6-db04595416c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.513701] 
env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5243e4-107a-4770-b6e3-e4aaf25fcace {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.528354] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.726511] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166820, 'name': ReconfigVM_Task, 'duration_secs': 0.572121} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.726851] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Reconfigured VM instance instance-0000000f to attach disk [datastore1] volume-f2b113a5-31d1-4fec-9883-264bb0b69860/volume-f2b113a5-31d1-4fec-9883-264bb0b69860.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.731667] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c8fb3ff-3941-40d7-9be4-8b0ee06cabb5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.746735] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Waiting for the task: (returnval){ [ 738.746735] env[68569]: value = "task-3166823" [ 738.746735] env[68569]: _type = "Task" [ 738.746735] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.756305] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166823, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.852530] env[68569]: DEBUG oslo_concurrency.lockutils [req-9799b430-5514-4a70-a147-a190f4281c33 req-0c1ef408-f6eb-49a2-8308-f95b538414e4 service nova] Releasing lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.941858] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166822, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.011078] env[68569]: DEBUG nova.compute.manager [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.011337] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.011625] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5f2c8bc-5ab1-4ff3-963c-253e4ae2aa1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.018859] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 739.018859] env[68569]: value = "task-3166824" [ 739.018859] env[68569]: _type = "Task" [ 739.018859] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.028478] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166824, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.031909] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.052269] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.052576] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c101974-c64a-4eb0-a61d-de104cd6935d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.059725] env[68569]: DEBUG oslo_vmware.api [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 739.059725] env[68569]: value = "task-3166825" [ 739.059725] env[68569]: _type = "Task" [ 739.059725] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.068078] env[68569]: DEBUG oslo_vmware.api [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166825, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.259178] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166823, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.441204] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.916649} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.441506] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.442299] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b862ea-dba8-4d72-8ec6-aefae7cd226d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.466057] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 16b6fafe-524d-482f-961b-10e3601ac4c2/16b6fafe-524d-482f-961b-10e3601ac4c2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.466239] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2133d2cd-2b4f-46ea-80df-3c638ce4d96e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.485392] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 739.485392] env[68569]: value = "task-3166826" [ 739.485392] env[68569]: _type = "Task" [ 739.485392] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.493656] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166826, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.528570] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166824, 'name': PowerOffVM_Task, 'duration_secs': 0.208111} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.528829] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.529029] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 739.529267] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633474', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'name': 'volume-b88d036e-c8ce-4222-807f-25716446b927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2cde3729-1be6-42c5-891f-42a7a8bff267', 'attached_at': '', 'detached_at': '', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'serial': 'b88d036e-c8ce-4222-807f-25716446b927'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 739.530059] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4734e9-0e2e-48f9-b5af-b64b107226e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.547746] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 739.548008] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.352s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.548404] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.695s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.548598] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.550685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.915s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.552168] env[68569]: INFO nova.compute.claims [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.555419] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993d4b41-bdab-4819-9bf1-61ff10386d95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.566149] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5416630e-a1a4-42cd-8bda-8d8fb067819d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.573667] env[68569]: DEBUG oslo_vmware.api [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166825, 'name': PowerOffVM_Task, 'duration_secs': 0.400906} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.590794] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.591059] env[68569]: DEBUG nova.compute.manager [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.592319] env[68569]: INFO nova.scheduler.client.report [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Deleted allocations for instance bda7e09b-848b-4d5d-a49d-6e0639f22f99 [ 739.593914] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169fec89-2464-4da8-abf6-a688edc4b0a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.597223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1461d922-8cb0-42c4-8312-d3f15cd9fdb3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.620600] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] The volume has not been displaced from its original location: [datastore2] volume-b88d036e-c8ce-4222-807f-25716446b927/volume-b88d036e-c8ce-4222-807f-25716446b927.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 739.626213] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Reconfiguring VM instance instance-0000001b to detach disk 2000 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 739.626213] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-540eccfe-4210-4dda-9afd-b1fde90a06ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.645107] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 739.645107] env[68569]: value = "task-3166827" [ 739.645107] env[68569]: _type = "Task" [ 739.645107] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.654448] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166827, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.758027] env[68569]: DEBUG oslo_vmware.api [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166823, 'name': ReconfigVM_Task, 'duration_secs': 0.65288} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.758027] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633551', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'name': 'volume-f2b113a5-31d1-4fec-9883-264bb0b69860', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50b9775c-ddbd-4e8f-a2b8-b08c3028fc28', 'attached_at': '', 'detached_at': '', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'serial': 'f2b113a5-31d1-4fec-9883-264bb0b69860'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 739.996467] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166826, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.107103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6c47f05-6658-4297-bd37-3a0cf8c2e0e8 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "bda7e09b-848b-4d5d-a49d-6e0639f22f99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.927s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.129086] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fd0f0f9-06ac-498c-ab1e-804f449a8904 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.093s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.155884] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166827, 'name': ReconfigVM_Task, 'duration_secs': 0.500108} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.156194] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Reconfigured VM instance instance-0000001b to detach disk 2000 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 740.161415] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db97d4a0-3dc3-458f-bdcd-9d1763a5cf14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.177074] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 740.177074] env[68569]: value = "task-3166828" [ 740.177074] env[68569]: _type = "Task" [ 740.177074] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.186317] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166828, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.499638] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166826, 'name': ReconfigVM_Task, 'duration_secs': 0.896267} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.500192] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 16b6fafe-524d-482f-961b-10e3601ac4c2/16b6fafe-524d-482f-961b-10e3601ac4c2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.501147] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7062297a-e23b-4690-a162-0c7893f3aa15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.509154] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 740.509154] env[68569]: value = "task-3166829" [ 740.509154] env[68569]: _type = "Task" [ 740.509154] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.521328] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166829, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.567401] env[68569]: DEBUG nova.objects.instance [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lazy-loading 'flavor' on Instance uuid de2b0206-0c73-4275-89ff-37199520dd71 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 740.688606] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166828, 'name': ReconfigVM_Task, 'duration_secs': 0.166532} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.692542] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633474', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'name': 'volume-b88d036e-c8ce-4222-807f-25716446b927', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2cde3729-1be6-42c5-891f-42a7a8bff267', 'attached_at': '', 'detached_at': '', 'volume_id': 'b88d036e-c8ce-4222-807f-25716446b927', 'serial': 'b88d036e-c8ce-4222-807f-25716446b927'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 740.693035] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.694470] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b3d78c-4879-4c94-afac-7ce1eb642c7d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.704039] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.704039] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25592df2-9186-4c4a-8cc9-144053287bf9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.779432] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.779744] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.780013] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Deleting the datastore file [datastore2] 2cde3729-1be6-42c5-891f-42a7a8bff267 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.780370] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9a84b89-1c19-4299-98e1-61dc106bf9c6 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.789310] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for the task: (returnval){ [ 740.789310] env[68569]: value = "task-3166831" [ 740.789310] env[68569]: _type = "Task" [ 740.789310] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.804662] env[68569]: DEBUG nova.objects.instance [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lazy-loading 'flavor' on Instance uuid 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 740.807175] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166831, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.023345] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166829, 'name': Rename_Task, 'duration_secs': 0.220363} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.023617] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.023863] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9535fac9-96c3-44b8-8b35-6298b0eca636 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.031279] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 741.031279] env[68569]: value = "task-3166832" [ 741.031279] env[68569]: _type = "Task" [ 741.031279] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.039189] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166832, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.072199] env[68569]: DEBUG oslo_concurrency.lockutils [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.072360] env[68569]: DEBUG oslo_concurrency.lockutils [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquired lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.072529] env[68569]: DEBUG nova.network.neutron [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.072697] env[68569]: DEBUG nova.objects.instance [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lazy-loading 'info_cache' on Instance uuid de2b0206-0c73-4275-89ff-37199520dd71 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 741.159770] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850bad8e-4e49-4b2a-aece-0d93b988de97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.167680] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8dfe40-745f-4b0a-b639-1d18f90e1f65 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.203356] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c13c8a-44ee-4bff-898f-5ff6a85a5992 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.213925] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f54eea6-9e40-41fd-9143-ff3a919f1c38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.228726] env[68569]: DEBUG nova.compute.provider_tree [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.298813] env[68569]: DEBUG oslo_vmware.api [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Task: {'id': task-3166831, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105233} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.299115] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.299490] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.299683] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.299863] env[68569]: INFO nova.compute.manager [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Took 2.29 seconds to destroy the instance on the hypervisor. [ 741.300124] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.300323] env[68569]: DEBUG nova.compute.manager [-] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.300419] env[68569]: DEBUG nova.network.neutron [-] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.310972] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6b34a7aa-2aba-4a77-beea-f2ca1ec78c7c tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.284s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.546152] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166832, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.577066] env[68569]: DEBUG nova.objects.base [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 741.731593] env[68569]: DEBUG nova.scheduler.client.report [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.917943] env[68569]: DEBUG nova.compute.manager [req-7d00af2c-ab78-45ae-ae1d-9b27f11591d0 req-f01dedc7-f5a1-4a0d-bf39-332abeb16b54 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Received event network-vif-deleted-b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 741.918234] env[68569]: INFO nova.compute.manager [req-7d00af2c-ab78-45ae-ae1d-9b27f11591d0 req-f01dedc7-f5a1-4a0d-bf39-332abeb16b54 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Neutron deleted interface b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4; detaching it from the instance and deleting it from the info cache [ 741.918425] env[68569]: DEBUG nova.network.neutron [req-7d00af2c-ab78-45ae-ae1d-9b27f11591d0 req-f01dedc7-f5a1-4a0d-bf39-332abeb16b54 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.003180] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.003467] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.044171] env[68569]: DEBUG oslo_vmware.api [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166832, 'name': PowerOnVM_Task, 'duration_secs': 0.536782} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.044426] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.044663] env[68569]: INFO nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Took 9.81 seconds to spawn the instance on the hypervisor. [ 742.044941] env[68569]: DEBUG nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.045924] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01912ce3-0fda-4d7b-bf70-f7a80191e50e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.239040] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.239620] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 742.242744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.710s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.242945] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.245120] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.676s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.245320] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.246970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.517s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.247197] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.248765] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.426s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.250564] env[68569]: INFO nova.compute.claims [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.289627] env[68569]: INFO nova.scheduler.client.report [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 
tempest-ListImageFiltersTestJSON-218439547-project-member] Deleted allocations for instance c9264123-ab19-40d5-959a-791b8966d2f6 [ 742.289627] env[68569]: INFO nova.scheduler.client.report [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted allocations for instance 78f486aa-80f4-4d43-bd00-cc6206517a72 [ 742.322976] env[68569]: INFO nova.scheduler.client.report [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted allocations for instance ec64b2fd-2409-4af1-8f51-cc0ccbba14f2 [ 742.346544] env[68569]: DEBUG nova.network.neutron [-] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.421181] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33ebae4e-bbf0-4e2d-8562-e4786e9be6c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.430723] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc91151-6635-487a-8611-8a20d63cc648 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.467310] env[68569]: DEBUG nova.compute.manager [req-7d00af2c-ab78-45ae-ae1d-9b27f11591d0 req-f01dedc7-f5a1-4a0d-bf39-332abeb16b54 service nova] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Detach interface failed, port_id=b8051e6d-d6a2-4d8a-8062-2d1a47aeb0d4, reason: Instance 2cde3729-1be6-42c5-891f-42a7a8bff267 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 742.510605] env[68569]: INFO nova.compute.manager [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Detaching volume f2b113a5-31d1-4fec-9883-264bb0b69860 [ 742.552670] env[68569]: INFO nova.virt.block_device [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Attempting to driver detach volume f2b113a5-31d1-4fec-9883-264bb0b69860 from mountpoint /dev/sdb [ 742.552941] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 742.553176] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633551', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'name': 'volume-f2b113a5-31d1-4fec-9883-264bb0b69860', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50b9775c-ddbd-4e8f-a2b8-b08c3028fc28', 'attached_at': '', 'detached_at': '', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'serial': 'f2b113a5-31d1-4fec-9883-264bb0b69860'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 742.554046] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdb850e-91b7-4861-8937-c3745bed2d87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.562705] env[68569]: INFO nova.compute.manager [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Took 45.54 seconds to build instance. [ 742.580550] env[68569]: DEBUG nova.network.neutron [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Updating instance_info_cache with network_info: [{"id": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "address": "fa:16:3e:39:3c:0e", "network": {"id": "94e283f4-28e6-4cd4-b6f6-50bce5946d26", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1348032782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c45df010dfc40089844060df429bb31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap502ae245-07", "ovs_interfaceid": "502ae245-07dd-43f0-a1dc-c733e5f5cd3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.582746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621dc77b-2bd8-41c7-9c6a-6aebc722c98d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.591593] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-56cfc171-a257-4a10-9bd6-f9dd167b19e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.613470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.613892] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.614259] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.614610] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.614955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.618213] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8583adb3-18b3-40ba-b6c9-48b44754b298 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.622264] env[68569]: DEBUG nova.compute.manager [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Received event network-changed {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 742.622545] env[68569]: DEBUG nova.compute.manager [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Refreshing instance network info cache due to event network-changed. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 742.623061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] Acquiring lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.623319] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] Acquired lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.623583] env[68569]: DEBUG nova.network.neutron [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.624983] env[68569]: INFO nova.compute.manager [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Terminating instance [ 742.650329] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] The volume has not been displaced from its original location: [datastore1] volume-f2b113a5-31d1-4fec-9883-264bb0b69860/volume-f2b113a5-31d1-4fec-9883-264bb0b69860.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 742.660065] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Reconfiguring VM instance instance-0000000f to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 742.661028] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84088f01-08e9-4539-993b-7a45ecfb28b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.686895] env[68569]: WARNING oslo_messaging._drivers.amqpdriver [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 742.693270] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Waiting for the task: (returnval){ [ 742.693270] env[68569]: value = "task-3166833" [ 742.693270] env[68569]: _type = "Task" [ 742.693270] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.703042] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166833, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.759077] env[68569]: DEBUG nova.compute.utils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.763237] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 742.763894] env[68569]: DEBUG nova.network.neutron [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.800893] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5b3f8c8b-bbb3-4150-a023-989dfdd27b07 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "78f486aa-80f4-4d43-bd00-cc6206517a72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.991s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.802976] env[68569]: DEBUG nova.policy [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '663e81274c5d4ecbb341ba970cc56dfa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd106cd175ad349bcada3d9c2bc9a40bb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.817575] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a155ec3-b71c-4140-98ae-a5a71e4a335e tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c9264123-ab19-40d5-959a-791b8966d2f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.478s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.830471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3f607b45-2f63-4dfd-8c5f-de77da622bf2 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "ec64b2fd-2409-4af1-8f51-cc0ccbba14f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.105s 
{{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.849677] env[68569]: INFO nova.compute.manager [-] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Took 1.55 seconds to deallocate network for instance. [ 743.083727] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f950d00-1ee6-4f2c-88b5-365be43ec460 tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.827s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.087261] env[68569]: DEBUG oslo_concurrency.lockutils [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Releasing lock "refresh_cache-de2b0206-0c73-4275-89ff-37199520dd71" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.131920] env[68569]: DEBUG nova.compute.manager [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 743.132497] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.134625] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b64ea4-1366-4ae6-a137-656812d5944c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.145036] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 743.145036] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-107dd053-36ca-4262-b38a-bf82e1f01571 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.153782] env[68569]: DEBUG oslo_vmware.api [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 743.153782] env[68569]: value = "task-3166834" [ 743.153782] env[68569]: _type = "Task" [ 743.153782] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.161252] env[68569]: DEBUG oslo_vmware.api [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166834, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.193588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "16b6fafe-524d-482f-961b-10e3601ac4c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.193588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.193588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "16b6fafe-524d-482f-961b-10e3601ac4c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.193588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.193725] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.194531] env[68569]: DEBUG nova.network.neutron [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Successfully created port: f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.199053] env[68569]: INFO nova.compute.manager [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Terminating instance [ 743.215272] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166833, 'name': ReconfigVM_Task, 'duration_secs': 0.415029} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.215272] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Reconfigured VM instance instance-0000000f to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 743.222566] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ec12473-8300-4167-b82a-5866a9693127 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.242026] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Waiting for the task: (returnval){ [ 743.242026] env[68569]: value = "task-3166835" [ 743.242026] env[68569]: _type = "Task" [ 743.242026] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.250094] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166835, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.265619] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.422707] env[68569]: INFO nova.compute.manager [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Took 0.57 seconds to detach 1 volumes for instance. 
[ 743.425895] env[68569]: DEBUG nova.compute.manager [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Deleting volume: b88d036e-c8ce-4222-807f-25716446b927 {{(pid=68569) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 743.574483] env[68569]: DEBUG nova.network.neutron [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Updating instance_info_cache with network_info: [{"id": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "address": "fa:16:3e:7d:1c:98", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc60d4a60-c0", "ovs_interfaceid": "c60d4a60-c03c-4dcf-ad20-45e8627f3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.588697] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.672888] env[68569]: DEBUG oslo_vmware.api [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166834, 'name': PowerOffVM_Task, 'duration_secs': 0.328657} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.677610] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 743.677610] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 743.679338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "c56e4282-b1ca-42f5-b346-692779475df0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.679338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c56e4282-b1ca-42f5-b346-692779475df0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.679338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "c56e4282-b1ca-42f5-b346-692779475df0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.679338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c56e4282-b1ca-42f5-b346-692779475df0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.679508] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c56e4282-b1ca-42f5-b346-692779475df0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.682799] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edf11b7a-60a4-4e58-bbe0-961b13093c0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.685749] env[68569]: INFO nova.compute.manager [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 
tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Terminating instance [ 743.710618] env[68569]: DEBUG nova.compute.manager [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 743.710870] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.711915] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faa8211-1c0a-49dd-86aa-e50a6ed680e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.720366] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 743.720474] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00abfe84-dea7-4c06-9151-73d254eddff4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.725987] env[68569]: DEBUG oslo_vmware.api [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 743.725987] env[68569]: value = "task-3166838" [ 743.725987] env[68569]: _type = "Task" [ 743.725987] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.735812] env[68569]: DEBUG oslo_vmware.api [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166838, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.749822] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166835, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.753859] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 743.754187] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 743.754312] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Deleting the datastore file [datastore2] 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.754563] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cc596cb-81ae-4be7-8065-4adb3f4a0b42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.760614] env[68569]: DEBUG oslo_vmware.api [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for the task: (returnval){ [ 743.760614] env[68569]: value = "task-3166839" [ 743.760614] env[68569]: _type = "Task" [ 743.760614] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.776324] env[68569]: DEBUG oslo_vmware.api [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166839, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.926446] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e32e17e-5dfb-43cf-9f7c-a75fdffae415 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.935585] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e223373a-dca1-48d9-9a21-7cb239e9b9ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.967825] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ccc752-22e5-4bb3-b332-c2c166160922 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.976368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3242d1-90c4-49b4-b77f-7823df8fdf7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.993221] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.993690] env[68569]: DEBUG nova.compute.provider_tree [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.076175] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de780556-7677-4cfe-8246-db975a006b55 tempest-ServerExternalEventsTest-1991247061 tempest-ServerExternalEventsTest-1991247061-project] Releasing lock "refresh_cache-16b6fafe-524d-482f-961b-10e3601ac4c2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.102415] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 744.102415] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7ae98cd-92ac-4824-8239-15028d76bdbd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.109513] env[68569]: DEBUG oslo_vmware.api [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 744.109513] env[68569]: value = "task-3166840" [ 744.109513] env[68569]: _type = "Task" [ 744.109513] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.118316] env[68569]: DEBUG oslo_vmware.api [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166840, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.119483] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.191115] env[68569]: DEBUG nova.compute.manager [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 744.191346] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 744.193425] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6722d0-d035-4b6e-95b3-f785f9862902 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.200524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "57a63648-83e9-4f23-aebc-050e58149ce2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.201513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "57a63648-83e9-4f23-aebc-050e58149ce2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.205824] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 744.205992] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-590adb9b-403a-423a-916a-734680f7f3fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.212156] env[68569]: DEBUG oslo_vmware.api [None 
req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 744.212156] env[68569]: value = "task-3166841" [ 744.212156] env[68569]: _type = "Task" [ 744.212156] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.220802] env[68569]: DEBUG oslo_vmware.api [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166841, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.226317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "60aa85f3-edac-40e0-ad31-a8f93219e380" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.226471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.236717] env[68569]: DEBUG oslo_vmware.api [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166838, 'name': PowerOffVM_Task, 'duration_secs': 0.19889} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.237150] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 744.237405] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 744.237773] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f774a23-d877-49c0-8123-c6ab80394d88 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.250271] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166835, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.272278] env[68569]: DEBUG oslo_vmware.api [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Task: {'id': task-3166839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204536} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.274134] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.276890] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 744.277354] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 744.277411] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 744.277598] env[68569]: INFO nova.compute.manager [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Took 1.15 seconds to destroy the instance on the hypervisor. [ 744.278211] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 744.278621] env[68569]: DEBUG nova.compute.manager [-] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 744.278827] env[68569]: DEBUG nova.network.neutron [-] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.306634] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.306634] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.306941] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.306941] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.307097] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.307296] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.307747] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.307747] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.307912] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.307970] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.308165] env[68569]: DEBUG nova.virt.hardware [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.309216] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a02ec6d-4b3d-4904-99b1-5c30e23ae580 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.314051] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 744.314238] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 744.314451] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Deleting the datastore file [datastore1] 16b6fafe-524d-482f-961b-10e3601ac4c2 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 744.314699] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bdac614-baa3-4668-b903-6e423d65ce8f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.320087] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621855d7-d0c7-44d9-96eb-cd24b83df6db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.324930] env[68569]: DEBUG oslo_vmware.api [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b 
tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for the task: (returnval){ [ 744.324930] env[68569]: value = "task-3166843" [ 744.324930] env[68569]: _type = "Task" [ 744.324930] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.342396] env[68569]: DEBUG oslo_vmware.api [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.497146] env[68569]: DEBUG nova.scheduler.client.report [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 744.575569] env[68569]: DEBUG nova.compute.manager [req-98730728-5920-4a76-ac6e-002a5bb3b70d req-f1a5b550-34a9-4b14-b697-e17bf3ca8760 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Received event network-vif-deleted-50f8883e-b914-4589-ac89-c1ccfb5a91de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 744.575773] env[68569]: INFO nova.compute.manager [req-98730728-5920-4a76-ac6e-002a5bb3b70d req-f1a5b550-34a9-4b14-b697-e17bf3ca8760 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Neutron deleted interface 50f8883e-b914-4589-ac89-c1ccfb5a91de; detaching it from the instance and deleting it from the info cache [ 744.575927] env[68569]: DEBUG nova.network.neutron [req-98730728-5920-4a76-ac6e-002a5bb3b70d req-f1a5b550-34a9-4b14-b697-e17bf3ca8760 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.619446] env[68569]: DEBUG oslo_vmware.api [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166840, 'name': PowerOnVM_Task, 'duration_secs': 0.414193} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.619715] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.619912] env[68569]: DEBUG nova.compute.manager [None req-86fab380-dc73-4155-b8a1-584d934cf8f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.620721] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a50ba9-9c9d-409f-8258-e0f74f3713d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.721755] env[68569]: DEBUG oslo_vmware.api [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166841, 'name': PowerOffVM_Task, 'duration_secs': 0.226377} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.722038] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 744.722215] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 744.722471] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d57ce794-e5db-4d3a-8187-069b337468fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.750795] env[68569]: DEBUG oslo_vmware.api [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Task: {'id': task-3166835, 'name': ReconfigVM_Task, 'duration_secs': 1.01196} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.751112] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633551', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'name': 'volume-f2b113a5-31d1-4fec-9883-264bb0b69860', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '50b9775c-ddbd-4e8f-a2b8-b08c3028fc28', 'attached_at': '', 'detached_at': '', 'volume_id': 'f2b113a5-31d1-4fec-9883-264bb0b69860', 'serial': 'f2b113a5-31d1-4fec-9883-264bb0b69860'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 744.801746] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 744.801965] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 744.802156] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Deleting the datastore file [datastore2] c56e4282-b1ca-42f5-b346-692779475df0 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 744.802577] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-769739c1-78e2-4f50-a556-9ff65f8c3ba4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.809144] env[68569]: DEBUG oslo_vmware.api [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for the task: (returnval){ [ 744.809144] env[68569]: value = "task-3166845" [ 744.809144] env[68569]: _type = "Task" [ 744.809144] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.816491] env[68569]: DEBUG oslo_vmware.api [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.833738] env[68569]: DEBUG oslo_vmware.api [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Task: {'id': task-3166843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127451} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.833967] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 744.834159] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 744.834336] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 744.834511] env[68569]: INFO nova.compute.manager [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 744.834786] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 744.835321] env[68569]: DEBUG nova.compute.manager [-] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 744.835321] env[68569]: DEBUG nova.network.neutron [-] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 745.003579] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.754s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.003579] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 745.007515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 34.990s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.056304] env[68569]: DEBUG nova.network.neutron [-] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.078434] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d9eae6c-ce85-4b86-ae6a-7c78982b7629 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.088516] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374438d3-283a-4302-a8ae-94433bc66edd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.121557] env[68569]: DEBUG nova.compute.manager [req-98730728-5920-4a76-ac6e-002a5bb3b70d req-f1a5b550-34a9-4b14-b697-e17bf3ca8760 service nova] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Detach interface failed, port_id=50f8883e-b914-4589-ac89-c1ccfb5a91de, reason: Instance 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 745.246228] env[68569]: DEBUG nova.network.neutron [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Successfully updated port: f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.304083] env[68569]: DEBUG nova.objects.instance [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lazy-loading 'flavor' on Instance uuid 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.318577] env[68569]: DEBUG oslo_vmware.api [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Task: {'id': task-3166845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132487} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.318837] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 745.319030] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 745.319209] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 745.319410] env[68569]: INFO nova.compute.manager [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 745.319653] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 745.319833] env[68569]: DEBUG nova.compute.manager [-] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 745.320319] env[68569]: DEBUG nova.network.neutron [-] [instance: c56e4282-b1ca-42f5-b346-692779475df0] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 745.511771] env[68569]: DEBUG nova.compute.utils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 745.513058] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 745.513218] env[68569]: DEBUG nova.network.neutron [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.522199] env[68569]: INFO nova.compute.claims [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.560316] env[68569]: INFO nova.compute.manager [-] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Took 1.28 seconds to deallocate network for instance. [ 745.599178] env[68569]: DEBUG nova.policy [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c9d77d804154a199681132cb34bf626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8bd8ff748a34e7a83ec0edbfa148aac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.752237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.752445] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquired lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.752633] env[68569]: DEBUG nova.network.neutron [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.766167] env[68569]: DEBUG nova.network.neutron [-] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.993449] env[68569]: DEBUG nova.network.neutron [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Successfully created port: 2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.029549] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad 
tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 746.033399] env[68569]: INFO nova.compute.resource_tracker [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Updating resource usage from migration aab3fe80-9a23-44ba-bdb2-b6166fdccf6c [ 746.069021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.194712] env[68569]: DEBUG nova.network.neutron [-] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.270121] env[68569]: INFO nova.compute.manager [-] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Took 1.43 seconds to deallocate network for instance. [ 746.311487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-09c788c0-073e-4f23-89e4-7c5869c5caf3 tempest-VolumesAssistedSnapshotsTest-146027044 tempest-VolumesAssistedSnapshotsTest-146027044-project-admin] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.308s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.323431] env[68569]: DEBUG nova.network.neutron [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.549367] env[68569]: DEBUG nova.network.neutron [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Updating instance_info_cache with network_info: [{"id": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "address": "fa:16:3e:59:0d:97", "network": {"id": "4f17a4eb-cdda-4db3-994c-ccffdd1faedc", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1160351331-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d106cd175ad349bcada3d9c2bc9a40bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf51da7b0-c0", "ovs_interfaceid": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.635227] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c02485-d436-4e18-b1b3-879f1251374a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.644414] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedc30cb-223a-4bde-a0fc-be9a550d69c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.674257] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d94369-2cc8-43ee-91c4-aa9fed0980c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.681448] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db05f5f3-f2fb-40f0-8627-807d16388e60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.695198] env[68569]: DEBUG nova.compute.provider_tree [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.703206] env[68569]: INFO nova.compute.manager [-] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Took 1.38 seconds to deallocate network for instance. 
[ 746.730315] env[68569]: DEBUG nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Received event network-vif-plugged-f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 746.730315] env[68569]: DEBUG oslo_concurrency.lockutils [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] Acquiring lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.730315] env[68569]: DEBUG oslo_concurrency.lockutils [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.730315] env[68569]: DEBUG oslo_concurrency.lockutils [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.730710] env[68569]: DEBUG nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] No waiting events found dispatching network-vif-plugged-f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 746.730988] env[68569]: WARNING nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Received unexpected event network-vif-plugged-f51da7b0-c063-444e-9ff2-3b08c597f70c for instance with vm_state building and task_state spawning. [ 746.731355] env[68569]: DEBUG nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Received event network-changed-f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 746.731625] env[68569]: DEBUG nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Refreshing instance network info cache due to event network-changed-f51da7b0-c063-444e-9ff2-3b08c597f70c. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 746.731927] env[68569]: DEBUG oslo_concurrency.lockutils [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] Acquiring lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.776737] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.011864] env[68569]: DEBUG nova.objects.instance [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lazy-loading 'flavor' on Instance uuid ad207187-634f-4e7f-9809-eb3f742ddeec {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.043243] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 747.055347] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Releasing lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.055347] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Instance network_info: |[{"id": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "address": "fa:16:3e:59:0d:97", "network": {"id": "4f17a4eb-cdda-4db3-994c-ccffdd1faedc", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1160351331-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d106cd175ad349bcada3d9c2bc9a40bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf51da7b0-c0", "ovs_interfaceid": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.055576] env[68569]: DEBUG oslo_concurrency.lockutils 
[req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] Acquired lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.055576] env[68569]: DEBUG nova.network.neutron [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Refreshing network info cache for port f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.056448] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:0d:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c02dd284-ab80-451c-93eb-48c8360acb9c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f51da7b0-c063-444e-9ff2-3b08c597f70c', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.065456] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Creating folder: Project (d106cd175ad349bcada3d9c2bc9a40bb). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.070062] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60a54c6a-9c07-4eda-9b28-53ff9fb29847 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.078164] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 747.078343] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.078442] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 747.078610] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.078750] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.078897] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 747.079121] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 747.079280] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 747.079470] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 747.079636] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 747.079861] env[68569]: DEBUG nova.virt.hardware [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 747.080900] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ff9622-bf13-4ec2-92ee-1b64767b4eea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.084868] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Created folder: Project (d106cd175ad349bcada3d9c2bc9a40bb) in parent group-v633430. [ 747.085063] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Creating folder: Instances. 
Parent ref: group-v633555. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.085612] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17a4f61b-6ce1-44c6-a8ee-9cf33da3961c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.092118] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33abe0bf-952c-42d3-a3e4-b0f924601327 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.097724] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Created folder: Instances in parent group-v633555. [ 747.097949] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.105409] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.107774] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3003b33b-b37a-4da0-a8a5-4cd77f1eaaba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.129293] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.129293] env[68569]: value = "task-3166849" [ 747.129293] env[68569]: _type = "Task" [ 747.129293] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.136401] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166849, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.202756] env[68569]: DEBUG nova.scheduler.client.report [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.209728] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.312841] env[68569]: DEBUG nova.network.neutron [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Updated VIF entry in instance network info cache for port f51da7b0-c063-444e-9ff2-3b08c597f70c. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 747.313440] env[68569]: DEBUG nova.network.neutron [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Updating instance_info_cache with network_info: [{"id": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "address": "fa:16:3e:59:0d:97", "network": {"id": "4f17a4eb-cdda-4db3-994c-ccffdd1faedc", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1160351331-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d106cd175ad349bcada3d9c2bc9a40bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf51da7b0-c0", "ovs_interfaceid": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.523229] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.523342] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.640145] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166849, 'name': CreateVM_Task, 'duration_secs': 0.408526} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.640866] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 747.641604] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.641950] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.642256] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 747.642808] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c212ce09-e445-4e9f-a08f-983b5b8c8dfd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.647251] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 747.647251] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52472097-1c44-d1fd-2b90-af49b5a29287" [ 747.647251] env[68569]: _type = "Task" [ 747.647251] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.655284] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52472097-1c44-d1fd-2b90-af49b5a29287, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.707703] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.700s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.707915] env[68569]: INFO nova.compute.manager [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Migrating [ 747.717253] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.998s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.718716] env[68569]: INFO nova.compute.claims [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.819472] env[68569]: DEBUG oslo_concurrency.lockutils [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] Releasing lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.819769] env[68569]: DEBUG nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Received event network-vif-deleted-c60d4a60-c03c-4dcf-ad20-45e8627f3c8f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 747.819952] env[68569]: DEBUG nova.compute.manager [req-1787b273-2336-407e-9c41-26c5194bc672 req-9c4a8b1d-9059-406e-9eb8-4891846eff64 service nova] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Received event network-vif-deleted-eacf90a1-83a1-4998-918b-84e4dcf8a668 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 747.887944] env[68569]: DEBUG nova.network.neutron [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Successfully updated port: 2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.129407] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "0c4d4d93-89bf-4164-973b-af48278a3915" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.129630] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 
tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "0c4d4d93-89bf-4164-973b-af48278a3915" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.129840] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "0c4d4d93-89bf-4164-973b-af48278a3915-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.130063] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "0c4d4d93-89bf-4164-973b-af48278a3915-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.130260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "0c4d4d93-89bf-4164-973b-af48278a3915-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.132489] env[68569]: INFO nova.compute.manager [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Terminating instance [ 748.136474] env[68569]: DEBUG nova.network.neutron [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.162396] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52472097-1c44-d1fd-2b90-af49b5a29287, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.162694] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.162913] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.163162] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.163300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.163518] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.163788] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e62095e9-0a47-45d8-9ffc-6e4622f622d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.173502] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.173502] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.174237] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af85bf3f-db15-4a3b-8f71-388e66c6a321 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.180114] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 748.180114] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52129755-acb2-5a94-1af1-fa3b475e6dbf" [ 748.180114] env[68569]: _type = "Task" [ 748.180114] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.188598] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52129755-acb2-5a94-1af1-fa3b475e6dbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.231108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.231290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquired lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.231465] env[68569]: DEBUG nova.network.neutron [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.390424] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "refresh_cache-398dd3c7-c630-4a29-b204-80f6fb394ce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.390717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "refresh_cache-398dd3c7-c630-4a29-b204-80f6fb394ce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.390863] env[68569]: DEBUG nova.network.neutron [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Building network info cache for instance {{(pid=68569) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 748.642054] env[68569]: DEBUG nova.compute.manager [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 748.642341] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.643047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531728b1-695e-4e79-a9f7-212b02f03f04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.650897] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.651134] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fcc08a5-30c3-473d-b09e-08f59cc4502c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.657695] env[68569]: DEBUG oslo_vmware.api [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 748.657695] env[68569]: value = "task-3166850" [ 748.657695] env[68569]: _type = "Task" [ 748.657695] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.665228] env[68569]: DEBUG oslo_vmware.api [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166850, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.689866] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52129755-acb2-5a94-1af1-fa3b475e6dbf, 'name': SearchDatastore_Task, 'duration_secs': 0.012053} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.690741] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af8dffd0-bf93-4009-8280-80e79363a42b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.696120] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 748.696120] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529a1229-ae2a-f40e-5103-1f194490358c" [ 748.696120] env[68569]: _type = "Task" [ 748.696120] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.703804] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529a1229-ae2a-f40e-5103-1f194490358c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.763579] env[68569]: DEBUG nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Received event network-vif-plugged-2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 748.763579] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Acquiring lock "398dd3c7-c630-4a29-b204-80f6fb394ce8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.763579] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.763805] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.763919] env[68569]: DEBUG nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] No waiting events found dispatching network-vif-plugged-2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 748.764081] env[68569]: WARNING nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Received unexpected event 
network-vif-plugged-2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 for instance with vm_state building and task_state spawning. [ 748.764288] env[68569]: DEBUG nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Received event network-changed-2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 748.765229] env[68569]: DEBUG nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Refreshing instance network info cache due to event network-changed-2d1e0d6d-0c37-425c-a138-7bc79f96a3a2. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 748.765229] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Acquiring lock "refresh_cache-398dd3c7-c630-4a29-b204-80f6fb394ce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.944058] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.944318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.944518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.944695] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.944861] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.947098] env[68569]: INFO nova.compute.manager [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Terminating instance [ 749.049998] env[68569]: DEBUG nova.network.neutron [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.170313] env[68569]: DEBUG oslo_vmware.api [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166850, 'name': PowerOffVM_Task, 'duration_secs': 0.201921} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.170313] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.170440] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.170733] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01c03b84-fbac-4e37-b483-548351ead8af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.219023] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529a1229-ae2a-f40e-5103-1f194490358c, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.219023] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.219023] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0/9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.219023] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f6c9ca6-c9ff-44a1-9451-9cb5f13a08e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.228857] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 749.228857] env[68569]: value = "task-3166852" [ 749.228857] env[68569]: _type = "Task" [ 749.228857] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.235932] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 749.236259] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 749.236481] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleting the datastore file [datastore2] 0c4d4d93-89bf-4164-973b-af48278a3915 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.236748] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-105e42f4-f8f0-4c49-9922-98405f800592 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.241525] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166852, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.248464] env[68569]: DEBUG oslo_vmware.api [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 749.248464] env[68569]: value = "task-3166853" [ 749.248464] env[68569]: _type = "Task" [ 749.248464] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.263053] env[68569]: DEBUG oslo_vmware.api [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166853, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.337755] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242e3829-2ac5-471e-ba3b-8d4cdae69d6d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.345229] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d77d5c7-a6fc-47ce-a066-27863f9bee57 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.381985] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e17fe6-9175-4c3d-a002-8e833225b16b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.390309] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44e71bf-bc32-40fd-84a6-1b98a299d20c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.405090] env[68569]: DEBUG nova.compute.provider_tree [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.415580] env[68569]: DEBUG nova.network.neutron [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.442534] env[68569]: DEBUG nova.network.neutron [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Updating instance_info_cache with network_info: [{"id": "2d1e0d6d-0c37-425c-a138-7bc79f96a3a2", "address": "fa:16:3e:2d:8c:c5", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d1e0d6d-0c", "ovs_interfaceid": "2d1e0d6d-0c37-425c-a138-7bc79f96a3a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.449877] env[68569]: DEBUG nova.compute.manager [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 749.450124] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.451212] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5365388a-c383-47c0-85db-c1dad1bf5ceb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.461748] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.462800] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfd8f6e4-f875-4b1b-ab67-e9332f7a0bf8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.469203] env[68569]: DEBUG oslo_vmware.api [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 749.469203] env[68569]: value = "task-3166854" [ 749.469203] env[68569]: _type = "Task" [ 749.469203] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.473581] env[68569]: DEBUG nova.network.neutron [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Updating instance_info_cache with network_info: [{"id": "1578930e-ed30-4f23-9d6a-55f02a439b25", "address": "fa:16:3e:b1:40:1d", "network": {"id": "30ac4d93-1a6f-4c74-bb05-37b8ab82fe8f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f32ccefd074040788fb3a7a857f48173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1578930e-ed", "ovs_interfaceid": "1578930e-ed30-4f23-9d6a-55f02a439b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.480787] env[68569]: DEBUG oslo_vmware.api [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: 
{'id': task-3166854, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.739134] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486096} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.739398] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0/9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.739398] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.739641] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d41ffed-6471-491e-930b-40fdc7c2e9f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.745278] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 749.745278] env[68569]: value = "task-3166855" [ 749.745278] env[68569]: _type = "Task" [ 749.745278] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.755780] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166855, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.758631] env[68569]: DEBUG oslo_vmware.api [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164075} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.758860] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 749.759046] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 749.759224] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.759392] env[68569]: INFO nova.compute.manager [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Took 1.12 seconds to destroy the instance on the hypervisor. [ 749.759802] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.759888] env[68569]: DEBUG nova.compute.manager [-] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 749.759927] env[68569]: DEBUG nova.network.neutron [-] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 749.908254] env[68569]: DEBUG nova.scheduler.client.report [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.917885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.918132] env[68569]: DEBUG nova.compute.manager [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Inject network info {{(pid=68569) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7744}} [ 749.918400] env[68569]: DEBUG nova.compute.manager [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] network_info to inject: |[{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7745}} [ 749.924185] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Reconfiguring VM instance to set the machine id {{(pid=68569) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 749.924185] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32869714-7cd9-42e5-ad4d-123f86f2db68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.940210] env[68569]: DEBUG oslo_vmware.api [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 749.940210] env[68569]: value = "task-3166856" [ 749.940210] env[68569]: _type = "Task" [ 749.940210] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.944512] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "refresh_cache-398dd3c7-c630-4a29-b204-80f6fb394ce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.945102] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance network_info: |[{"id": "2d1e0d6d-0c37-425c-a138-7bc79f96a3a2", "address": "fa:16:3e:2d:8c:c5", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d1e0d6d-0c", "ovs_interfaceid": "2d1e0d6d-0c37-425c-a138-7bc79f96a3a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 749.945102] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Acquired lock "refresh_cache-398dd3c7-c630-4a29-b204-80f6fb394ce8" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.945285] env[68569]: DEBUG nova.network.neutron [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Refreshing network info cache for port 2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.946289] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:8c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d1e0d6d-0c37-425c-a138-7bc79f96a3a2', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.955186] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating folder: Project (c8bd8ff748a34e7a83ec0edbfa148aac). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.962404] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-baa029fd-7d85-42ee-9d16-87bfdf375da7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.965049] env[68569]: DEBUG oslo_vmware.api [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166856, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.975098] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created folder: Project (c8bd8ff748a34e7a83ec0edbfa148aac) in parent group-v633430. [ 749.975294] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating folder: Instances. Parent ref: group-v633558. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.975861] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a0a5baa-f24d-4304-bed2-c9a2e757b487 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.980668] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Releasing lock "refresh_cache-912303de-a79d-41b0-ab44-c79e850a4dee" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.982029] env[68569]: DEBUG oslo_vmware.api [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166854, 'name': PowerOffVM_Task, 'duration_secs': 0.220138} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.982870] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.983097] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.983586] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-711fa7e0-e517-44b4-ab6f-31e106c54665 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.990135] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created folder: Instances in parent group-v633558. [ 749.990407] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.990638] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.990867] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ff8b366-9517-4a00-b9f7-19c9ccc8b880 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.015569] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.015569] env[68569]: value = "task-3166860" [ 750.015569] env[68569]: _type = "Task" [ 750.015569] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.023146] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166860, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.047267] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.047380] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.047634] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Deleting the datastore file [datastore1] 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.047918] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9356ac30-141a-40f5-a446-78cf26cf6a82 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.054723] env[68569]: DEBUG oslo_vmware.api [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for the task: (returnval){ [ 750.054723] env[68569]: value = "task-3166861" [ 750.054723] env[68569]: _type = "Task" [ 750.054723] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.231428] env[68569]: DEBUG nova.network.neutron [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Updated VIF entry in instance network info cache for port 2d1e0d6d-0c37-425c-a138-7bc79f96a3a2. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.231985] env[68569]: DEBUG nova.network.neutron [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Updating instance_info_cache with network_info: [{"id": "2d1e0d6d-0c37-425c-a138-7bc79f96a3a2", "address": "fa:16:3e:2d:8c:c5", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d1e0d6d-0c", "ovs_interfaceid": "2d1e0d6d-0c37-425c-a138-7bc79f96a3a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.256838] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062451} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.257134] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.257954] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1524ef-16a0-473e-9e4a-946aad77ed07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.284939] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0/9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.285326] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4946e5d1-e52b-4a52-8428-57da0f9246a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.306223] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 750.306223] env[68569]: value = "task-3166862" [ 750.306223] env[68569]: _type = "Task" [ 750.306223] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.315779] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166862, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.414039] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.415246] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 750.417243] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.575s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.418720] env[68569]: INFO nova.compute.claims [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.450392] env[68569]: DEBUG oslo_vmware.api [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166856, 'name': ReconfigVM_Task, 'duration_secs': 0.160015} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.451633] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4fdd883-fe06-4fc4-b903-b94bc9bcd406 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Reconfigured VM instance to set the machine id {{(pid=68569) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 750.511365] env[68569]: DEBUG nova.network.neutron [-] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.525500] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166860, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.564899] env[68569]: DEBUG oslo_vmware.api [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Task: {'id': task-3166861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164388} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.564899] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.565143] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.565202] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.565352] env[68569]: INFO nova.compute.manager [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Took 1.12 seconds to destroy the instance on the hypervisor. [ 750.565594] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.565774] env[68569]: DEBUG nova.compute.manager [-] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 750.565931] env[68569]: DEBUG nova.network.neutron [-] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.612374] env[68569]: DEBUG nova.objects.instance [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lazy-loading 'flavor' on Instance uuid ad207187-634f-4e7f-9809-eb3f742ddeec {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.735025] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Releasing lock "refresh_cache-398dd3c7-c630-4a29-b204-80f6fb394ce8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.735025] env[68569]: DEBUG nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received event network-changed-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 750.735305] env[68569]: DEBUG nova.compute.manager [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing instance network info cache due to event network-changed-17651803-ec04-4913-b621-e685c574de0c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 750.735446] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.735606] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.735746] env[68569]: DEBUG nova.network.neutron [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing network info cache for port 17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.819492] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166862, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.925470] env[68569]: DEBUG nova.compute.utils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.927429] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 750.928402] env[68569]: DEBUG nova.network.neutron [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.999018] env[68569]: DEBUG nova.policy [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c9d77d804154a199681132cb34bf626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8bd8ff748a34e7a83ec0edbfa148aac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 751.012888] env[68569]: INFO nova.compute.manager [-] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Took 1.25 seconds to deallocate network for instance. [ 751.030937] env[68569]: DEBUG nova.compute.manager [req-a0b49a8e-e653-41c7-804e-6fc0db7a45e7 req-1cbef8df-05ef-430e-9a85-79af7607a47f service nova] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Received event network-vif-deleted-44f67a03-e572-481c-bda6-954144ca8539 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 751.043333] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166860, 'name': CreateVM_Task, 'duration_secs': 0.69974} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.043530] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.044192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.044357] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.044672] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 751.044925] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e536a51d-3525-449c-b090-ec7946067d41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.055251] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 751.055251] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528b2660-2344-e5fd-5abd-43607806aff0" [ 751.055251] env[68569]: _type = "Task" [ 751.055251] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.064400] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528b2660-2344-e5fd-5abd-43607806aff0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.116785] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.316756] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166862, 'name': ReconfigVM_Task, 'duration_secs': 0.985376} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.317196] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0/9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.318182] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c91cd2c-1792-49fc-b847-7dfd92aaa9ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.327687] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 751.327687] env[68569]: value = "task-3166863" [ 751.327687] env[68569]: _type = "Task" [ 751.327687] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.336892] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166863, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.434342] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 751.533298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.536983] env[68569]: DEBUG nova.network.neutron [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Successfully created port: 5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.567755] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528b2660-2344-e5fd-5abd-43607806aff0, 'name': SearchDatastore_Task, 'duration_secs': 0.010662} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.568053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.568307] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.568543] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.568689] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.568952] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.569169] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-593556c6-c971-4b51-855f-23aa721b7a74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.580232] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.580424] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.581146] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65ad3ea5-ea71-4e1b-bc90-154c1bbbe321 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.588961] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 751.588961] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5201f9a1-8204-c3c3-4cb0-f6e2f5f1a1b9" [ 751.588961] env[68569]: _type = "Task" [ 751.588961] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.597912] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5201f9a1-8204-c3c3-4cb0-f6e2f5f1a1b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.656194] env[68569]: DEBUG nova.network.neutron [-] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.705315] env[68569]: DEBUG nova.network.neutron [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updated VIF entry in instance network info cache for port 17651803-ec04-4913-b621-e685c574de0c. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.706239] env[68569]: DEBUG nova.network.neutron [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.838156] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166863, 'name': Rename_Task, 'duration_secs': 0.188669} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.841176] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.841615] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e14b34a-4de8-4648-9d43-876c550c1f20 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.848613] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 751.848613] env[68569]: value = "task-3166864" [ 751.848613] env[68569]: _type = "Task" [ 751.848613] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.861836] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166864, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.036075] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051cede9-d0f8-4e80-82e8-f5be46e356f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.044416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9ce832-c22c-4e80-a5e7-7399e8b14190 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.075203] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859ad5f7-a50d-40f8-aa4e-970a9d85aaa2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.082809] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f2a64d-5924-4460-9b2b-b34990a802e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.096760] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 752.105056] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5201f9a1-8204-c3c3-4cb0-f6e2f5f1a1b9, 'name': SearchDatastore_Task, 'duration_secs': 0.010154} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.106357] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6878a566-9ca1-448c-aa31-afb22d6875ba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.111934] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 752.111934] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52658f07-8eb5-ab41-6d77-6f67db33e915" [ 752.111934] env[68569]: _type = "Task" [ 752.111934] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.119284] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52658f07-8eb5-ab41-6d77-6f67db33e915, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.158498] env[68569]: INFO nova.compute.manager [-] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Took 1.59 seconds to deallocate network for instance. [ 752.215908] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e7389d-1ee7-4886-a990-173a8f462092 req-6f5a74a3-2357-43c9-9b06-f01ce2508647 service nova] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.215908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.358467] env[68569]: DEBUG oslo_vmware.api [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166864, 'name': PowerOnVM_Task, 'duration_secs': 0.467362} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.358674] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.358869] env[68569]: INFO nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Took 8.08 seconds to spawn the instance on the hypervisor. [ 752.359060] env[68569]: DEBUG nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.359844] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983ffcd1-1f23-472a-a30d-4f6e57f22701 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.442539] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 752.469121] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 752.470421] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.470421] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 752.470421] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.470421] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 752.470616] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 752.470937] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 752.471223] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 752.471833] env[68569]: DEBUG nova.virt.hardware [None 
req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 752.471833] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 752.472129] env[68569]: DEBUG nova.virt.hardware [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 752.473743] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d5fcb5-a4b0-4ab1-a2b5-6d3818eeab3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.486785] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42597fb-1e46-491e-a57b-255c4f89b348 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.505997] env[68569]: ERROR nova.compute.manager [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Traceback (most recent call last): [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] yield [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] disk_info = self.driver.migrate_disk_and_power_off( [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 752.505997] env[68569]: ERROR nova.compute.manager [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] [ 752.564242] env[68569]: DEBUG nova.network.neutron [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.622412] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 
tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52658f07-8eb5-ab41-6d77-6f67db33e915, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.623307] env[68569]: ERROR nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [req-b6600c6f-87fc-4564-b9fe-8d4448b12d8c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b6600c6f-87fc-4564-b9fe-8d4448b12d8c"}]} [ 752.623658] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.623982] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.626244] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3d27a27-b385-41bb-9adc-30d84310f38f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.633392] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 752.633392] env[68569]: value = "task-3166865" [ 752.633392] env[68569]: _type = "Task" [ 752.633392] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.641571] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166865, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.642502] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 752.657238] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 752.657436] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 752.664248] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.668696] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 752.688473] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 752.879164] env[68569]: INFO nova.compute.manager [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 
tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Took 51.26 seconds to build instance. [ 753.025785] env[68569]: INFO nova.compute.manager [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration aab3fe80-9a23-44ba-bdb2-b6166fdccf6c for instance [ 753.051840] env[68569]: DEBUG nova.scheduler.client.report [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 72}}, 'project_id': 'fb7d044e2a2e4568b5c8c922b17a81ce', 'user_id': '48f0153c75da4790905b1f734eb447e0', 'consumer_generation': 1} on consumer 912303de-a79d-41b0-ab44-c79e850a4dee {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 753.146033] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464544} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.146227] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.146438] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.146683] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7dee48f2-44c4-4d0d-8e91-1cb3ff1b88f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.153851] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 753.153851] env[68569]: value = "task-3166866" [ 753.153851] env[68569]: _type = "Task" [ 753.153851] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.161430] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.246830] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11521aec-5779-4b9d-b2ff-c04908cb366a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.264865] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883d9c25-fda9-4280-8cf7-09432cfa9dee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.270121] env[68569]: DEBUG nova.compute.manager [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Received event network-vif-deleted-39d832b9-c266-46b1-9633-7204b1095ba5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 753.270324] env[68569]: DEBUG nova.compute.manager [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received event network-changed-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 753.270486] env[68569]: DEBUG nova.compute.manager [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing instance network info cache due to event network-changed-17651803-ec04-4913-b621-e685c574de0c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 753.270723] env[68569]: DEBUG oslo_concurrency.lockutils [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] Acquiring lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.303564] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f8816c-bbb8-421c-acab-c86695e1fad3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.313033] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3147b6-ffcd-4c68-ac7c-e1a603cd8214 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.331326] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.385834] env[68569]: DEBUG oslo_concurrency.lockutils [None req-148afe05-7e4c-4f7e-811c-c8653e8cef09 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.171s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.439266] env[68569]: DEBUG nova.network.neutron [None 
req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Successfully updated port: 5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.451373] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.451773] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.452608] env[68569]: INFO nova.compute.manager [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Rebooting instance [ 753.489364] env[68569]: DEBUG nova.network.neutron [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.662743] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066996} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.663099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.663899] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfa5c25-bcd3-409a-9c0b-8caefede7457 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.686227] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.686489] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1c56675-261d-4d1a-be14-42fdcf9b3022 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.711586] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 753.711586] env[68569]: value = "task-3166867" [ 753.711586] env[68569]: _type = "Task" [ 753.711586] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.720190] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166867, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.835014] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.889411] env[68569]: DEBUG nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 753.946197] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "refresh_cache-b40c9dec-cebc-4d23-8df4-96e804333706" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.946433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "refresh_cache-b40c9dec-cebc-4d23-8df4-96e804333706" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.947273] env[68569]: DEBUG nova.network.neutron [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.976140] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.976416] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquired lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.976645] env[68569]: DEBUG nova.network.neutron [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.991838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.992093] env[68569]: DEBUG nova.compute.manager [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Inject network info {{(pid=68569) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7744}} [ 753.992341] env[68569]: DEBUG nova.compute.manager [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] network_info to inject: |[{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": 
"tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7745}} [ 753.997069] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Reconfiguring VM instance to set the machine id {{(pid=68569) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 753.997674] env[68569]: DEBUG oslo_concurrency.lockutils [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] Acquired lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.997851] env[68569]: DEBUG nova.network.neutron [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Refreshing network info cache for port 17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.999816] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81c3d9cc-c2b2-4a8a-b6be-c08ed0a302e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.015312] env[68569]: DEBUG oslo_vmware.api [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 754.015312] env[68569]: value = "task-3166868" [ 754.015312] env[68569]: _type = "Task" [ 754.015312] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.023641] env[68569]: DEBUG oslo_vmware.api [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166868, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.207603] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "912303de-a79d-41b0-ab44-c79e850a4dee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.207958] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "912303de-a79d-41b0-ab44-c79e850a4dee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.208190] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "912303de-a79d-41b0-ab44-c79e850a4dee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.208371] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "912303de-a79d-41b0-ab44-c79e850a4dee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.208534] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "912303de-a79d-41b0-ab44-c79e850a4dee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.211681] env[68569]: INFO nova.compute.manager [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Terminating instance [ 754.224123] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166867, 'name': ReconfigVM_Task, 'duration_secs': 0.245308} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.225165] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.225767] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7b611e2-17fb-4202-ac3d-bb09c6d44a62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.231845] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 754.231845] env[68569]: value = "task-3166869" [ 754.231845] env[68569]: _type = "Task" [ 754.231845] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.240232] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166869, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.340049] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.923s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.340591] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 754.344185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.986s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.346035] env[68569]: INFO nova.compute.claims [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.412927] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.475683] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "ad207187-634f-4e7f-9809-eb3f742ddeec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.475939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.476162] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "ad207187-634f-4e7f-9809-eb3f742ddeec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.476378] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.476512] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.478638] env[68569]: INFO nova.compute.manager [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Terminating instance [ 754.486651] env[68569]: DEBUG nova.network.neutron [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.526085] env[68569]: DEBUG oslo_vmware.api [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166868, 'name': ReconfigVM_Task, 'duration_secs': 0.170407} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.526349] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8690cf5b-435b-422c-9baa-0b6a5e02c974 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Reconfigured VM instance to set the machine id {{(pid=68569) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 754.628051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.720160] env[68569]: DEBUG nova.compute.manager [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.720386] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.721375] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3434366d-e6b5-4b8d-8f90-66834aca87b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.731808] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.732069] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87e6f7c1-a612-4d67-a041-d9d641e5f4ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.741884] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166869, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.745175] env[68569]: DEBUG oslo_vmware.api [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 754.745175] env[68569]: value = "task-3166870" [ 754.745175] env[68569]: _type = "Task" [ 754.745175] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.752924] env[68569]: DEBUG oslo_vmware.api [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166870, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.850637] env[68569]: DEBUG nova.compute.utils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 754.854117] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 754.854299] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 754.862744] env[68569]: DEBUG nova.network.neutron [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Updating instance_info_cache with network_info: [{"id": "5f4944ec-908c-452e-9cdf-7b42d3277124", "address": "fa:16:3e:60:3c:57", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4944ec-90", "ovs_interfaceid": "5f4944ec-908c-452e-9cdf-7b42d3277124", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.922313] env[68569]: DEBUG nova.policy [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17242adad46e4c188e337060656e51c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '061d8bd706114545b34e9e0e6226d700', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 754.984712] env[68569]: DEBUG nova.compute.manager [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.984996] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.986629] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ebac83-4484-4118-b309-adb7b13dbea5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.995622] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.995908] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d802e1b-8ff8-4e9c-b6d1-c05af904234d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.002317] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 755.002317] env[68569]: value = "task-3166871" [ 755.002317] env[68569]: _type = "Task" [ 755.002317] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.012179] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166871, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.201800] env[68569]: DEBUG nova.network.neutron [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Updating instance_info_cache with network_info: [{"id": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "address": "fa:16:3e:59:0d:97", "network": {"id": "4f17a4eb-cdda-4db3-994c-ccffdd1faedc", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1160351331-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d106cd175ad349bcada3d9c2bc9a40bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf51da7b0-c0", "ovs_interfaceid": "f51da7b0-c063-444e-9ff2-3b08c597f70c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.243074] env[68569]: DEBUG nova.network.neutron [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updated VIF entry in instance network info cache for port 17651803-ec04-4913-b621-e685c574de0c. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.243477] env[68569]: DEBUG nova.network.neutron [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [{"id": "17651803-ec04-4913-b621-e685c574de0c", "address": "fa:16:3e:d8:ec:c5", "network": {"id": "daa85e6b-305e-4981-87a7-9c7e6cdd4113", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-224395556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de1ec9d4fc3e45afb009f75ac86d5f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17651803-ec", "ovs_interfaceid": "17651803-ec04-4913-b621-e685c574de0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.254171] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166869, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.259950] env[68569]: DEBUG oslo_vmware.api [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166870, 'name': PowerOffVM_Task, 'duration_secs': 0.217244} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.260254] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.260421] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.260701] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-192fffa8-8b26-489d-a298-3d577f7f8ecc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.284213] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Successfully created port: 519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.317308] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.317612] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.317831] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Deleting the datastore file [datastore2] 912303de-a79d-41b0-ab44-c79e850a4dee {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.318157] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c84430e-4690-4135-aa5f-556593d3b2ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.326376] env[68569]: DEBUG oslo_vmware.api [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 755.326376] env[68569]: value = "task-3166873" [ 755.326376] env[68569]: _type = "Task" [ 755.326376] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.334760] env[68569]: DEBUG oslo_vmware.api [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166873, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.358059] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 755.371024] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "refresh_cache-b40c9dec-cebc-4d23-8df4-96e804333706" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.371024] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Instance network_info: |[{"id": "5f4944ec-908c-452e-9cdf-7b42d3277124", "address": "fa:16:3e:60:3c:57", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4944ec-90", "ovs_interfaceid": "5f4944ec-908c-452e-9cdf-7b42d3277124", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.371297] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:3c:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f4944ec-908c-452e-9cdf-7b42d3277124', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.381849] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.382355] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.382961] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9167c347-41c0-4e43-ac2a-20f6d357533d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.409883] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.409883] env[68569]: value = "task-3166874" [ 755.409883] env[68569]: _type = "Task" [ 755.409883] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.420284] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166874, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.487507] env[68569]: DEBUG nova.compute.manager [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Received event network-vif-plugged-5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 755.487827] env[68569]: DEBUG oslo_concurrency.lockutils [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] Acquiring lock "b40c9dec-cebc-4d23-8df4-96e804333706-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.488182] env[68569]: DEBUG oslo_concurrency.lockutils [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] Lock "b40c9dec-cebc-4d23-8df4-96e804333706-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.489409] env[68569]: DEBUG oslo_concurrency.lockutils [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] Lock "b40c9dec-cebc-4d23-8df4-96e804333706-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.489409] env[68569]: DEBUG nova.compute.manager [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] No waiting events found dispatching network-vif-plugged-5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 755.489409] env[68569]: WARNING nova.compute.manager [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Received unexpected event network-vif-plugged-5f4944ec-908c-452e-9cdf-7b42d3277124 for instance with vm_state building and task_state spawning. 
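The entries above show how nova-compute serializes external instance events: a per-instance "<uuid>-events" lock is taken via oslo.concurrency before the event is popped and dispatched, then released. A minimal sketch of that locking pattern, using the public lockutils.lock context manager; the lock name, data structure and function below are illustrative, not code taken from this log:

    # Minimal sketch of the per-instance event lock pattern visible in the
    # "Acquiring lock ... acquired ... released" entries above.
    # oslo.concurrency is a real dependency; everything else is illustrative.
    from oslo_concurrency import lockutils

    def pop_instance_event(events, instance_uuid, event_name):
        # Mirrors the "<uuid>-events" lock acquire/release pairs in the log:
        # only one thread may mutate an instance's pending-event map at a time.
        with lockutils.lock(f"{instance_uuid}-events"):
            return events.setdefault(instance_uuid, {}).pop(event_name, None)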
[ 755.489409] env[68569]: DEBUG nova.compute.manager [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Received event network-changed-5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 755.489698] env[68569]: DEBUG nova.compute.manager [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Refreshing instance network info cache due to event network-changed-5f4944ec-908c-452e-9cdf-7b42d3277124. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 755.490542] env[68569]: DEBUG oslo_concurrency.lockutils [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] Acquiring lock "refresh_cache-b40c9dec-cebc-4d23-8df4-96e804333706" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.490542] env[68569]: DEBUG oslo_concurrency.lockutils [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] Acquired lock "refresh_cache-b40c9dec-cebc-4d23-8df4-96e804333706" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.490542] env[68569]: DEBUG nova.network.neutron [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Refreshing network info cache for port 5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.519481] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166871, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.704053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Releasing lock "refresh_cache-9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.745606] env[68569]: DEBUG oslo_concurrency.lockutils [req-09f99687-b233-4980-bac5-74ef64d49856 req-c6682d6b-308e-4a79-959d-ede91d656402 service nova] Releasing lock "refresh_cache-ad207187-634f-4e7f-9809-eb3f742ddeec" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.750759] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166869, 'name': Rename_Task, 'duration_secs': 1.164159} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.753240] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.753658] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4b5957f-7aec-422a-952c-4624ab5fb48a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.760862] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 755.760862] env[68569]: value = "task-3166875" [ 755.760862] env[68569]: _type = "Task" [ 755.760862] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.775057] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.839861] env[68569]: DEBUG oslo_vmware.api [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169162} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.840366] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.840680] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.841061] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.841454] env[68569]: INFO nova.compute.manager [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Took 1.12 seconds to destroy the instance on the hypervisor. 
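Every *_Task entry above (PowerOffVM_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) follows the same oslo.vmware shape: the SOAP method is invoked through the session, a task reference comes back, and wait_for_task polls it until the "completed successfully" line is logged with its duration_secs. A rough sketch of that invoke-and-wait pattern; the host, credentials and vm_ref are placeholders, not values from this environment:

    # Rough sketch of the oslo.vmware invoke/wait pattern behind the *_Task
    # entries above. Connection details are placeholders.
    from oslo_vmware import api as vmware_api

    def power_off(session, vm_ref):
        # Start the asynchronous vSphere task ...
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # ... then block on it; this is the wait_for_task/_poll_task loop that
        # emits the "progress is 0%" and "completed successfully" lines.
        return session.wait_for_task(task)

    if __name__ == "__main__":
        session = vmware_api.VMwareAPISession(
            "vcenter.example.org", "admin", "secret",
            api_retry_count=10, task_poll_interval=0.5)
        # power_off(session, vm_ref) would be called with a VM moref obtained
        # from a PropertyCollector lookup like the ones logged above.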
[ 755.841932] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.842311] env[68569]: DEBUG nova.compute.manager [-] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.842551] env[68569]: DEBUG nova.network.neutron [-] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.920393] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166874, 'name': CreateVM_Task, 'duration_secs': 0.32602} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.920393] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 755.920928] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.921178] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.921438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 755.924573] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c844ad0c-78d5-4c1f-841a-9255bd3b78a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.930460] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 755.930460] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c7bbd6-9a1d-dd00-0ec2-e1003c402aff" [ 755.930460] env[68569]: _type = "Task" [ 755.930460] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.939621] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c7bbd6-9a1d-dd00-0ec2-e1003c402aff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.969626] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047513ec-e002-4844-9965-bb3671134317 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.977588] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81764b15-4b48-4cb3-9a5e-be40e6c2b87f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.011354] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.011596] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.011791] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.011963] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.012131] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.017468] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89c83ba-3385-4163-be01-d2c8b1f85429 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.019656] env[68569]: INFO nova.compute.manager [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Terminating instance [ 756.026331] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166871, 'name': PowerOffVM_Task, 'duration_secs': 0.546319} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.028642] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.028833] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.029399] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-565a361c-0a05-428b-bc55-4961b256b573 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.031913] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486c4095-59ca-4b3b-88fe-7c88b71f7b89 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.047114] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.095824] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.096148] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.096357] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Deleting the datastore file [datastore1] ad207187-634f-4e7f-9809-eb3f742ddeec {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.096611] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2939a85-c58a-462c-bb2d-2939eccc006c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.102389] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for the task: (returnval){ [ 756.102389] env[68569]: value = "task-3166877" [ 756.102389] env[68569]: _type = "Task" [ 756.102389] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.110476] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166877, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.208464] env[68569]: DEBUG nova.compute.manager [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.209496] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2342c02-42f3-4e6d-8a81-033e064185ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.269997] env[68569]: DEBUG oslo_vmware.api [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166875, 'name': PowerOnVM_Task, 'duration_secs': 0.468385} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.270128] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.270319] env[68569]: INFO nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Took 9.23 seconds to spawn the instance on the hypervisor. 
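The network_info blobs that keep appearing in these entries (the instance_info_cache updates, the "Instance network_info" line, the "Instance VIF info" line) are plain nested dicts: each VIF carries a port id, MAC address, binding details and a network/subnet tree, and the VMware driver flattens that into the OpaqueNetwork reference it attaches a vmxnet3 NIC to. A small self-contained sketch of that flattening, using one VIF abbreviated from the cache entries above:

    # Small sketch: turning one cached VIF dict (abbreviated from the
    # instance_info_cache entries above) into the kind of "Instance VIF info"
    # structure logged by the VMware driver. Purely illustrative.
    vif = {
        "id": "5f4944ec-908c-452e-9cdf-7b42d3277124",
        "address": "fa:16:3e:60:3c:57",
        "details": {"nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682",
                    "segmentation_id": 416},
        "network": {"bridge": "br-int",
                    "subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.3"}]}]},
    }

    vif_info = {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        # The NSX logical-switch id becomes the OpaqueNetwork the NIC is
        # attached to, matching the "Instance VIF info" entry above.
        "network_ref": {"type": "OpaqueNetwork",
                        "network-id": vif["details"]["nsx-logical-switch-id"],
                        "network-type": "nsx.LogicalSwitch"},
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }
    print(vif_info)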
[ 756.270496] env[68569]: DEBUG nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.271322] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfff24e0-3112-4677-bc17-83788715e95f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.281234] env[68569]: DEBUG nova.network.neutron [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Updated VIF entry in instance network info cache for port 5f4944ec-908c-452e-9cdf-7b42d3277124. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.281559] env[68569]: DEBUG nova.network.neutron [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Updating instance_info_cache with network_info: [{"id": "5f4944ec-908c-452e-9cdf-7b42d3277124", "address": "fa:16:3e:60:3c:57", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4944ec-90", "ovs_interfaceid": "5f4944ec-908c-452e-9cdf-7b42d3277124", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.370536] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 756.404569] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 756.404832] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.404980] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 756.405180] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.405326] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 756.405469] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 756.405670] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 756.405829] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 756.406397] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 756.406397] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 756.406397] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 756.407184] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1693ee71-9943-47eb-b21c-d6dfbf7cea31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.415343] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f71f50-3037-4b4e-9ba7-a67026f107ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.439647] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c7bbd6-9a1d-dd00-0ec2-e1003c402aff, 'name': SearchDatastore_Task, 'duration_secs': 0.013333} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.439946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.440228] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.440446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.440590] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.440766] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.441035] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3f55ed4-5a95-40ea-be5e-a47066ece2e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.453238] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.453787] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 756.454126] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e7296e8-36d2-4bf8-b2b9-5a6c59a7babf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.461980] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 756.461980] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52750117-6c06-2154-353b-1f8f3f8ba896" [ 756.461980] env[68569]: _type = "Task" [ 756.461980] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.469109] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52750117-6c06-2154-353b-1f8f3f8ba896, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.528060] env[68569]: DEBUG nova.compute.manager [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 756.528168] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.529021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f819057-5807-41ef-881e-d066b0e4776c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.536323] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.536564] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de34517b-d4dc-4c24-95a4-1965eeefb54e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.542860] env[68569]: DEBUG oslo_vmware.api [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 756.542860] env[68569]: value = "task-3166878" [ 756.542860] env[68569]: _type = "Task" [ 756.542860] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.552560] env[68569]: DEBUG oslo_vmware.api [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.571265] env[68569]: ERROR nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [req-9108d5ec-af3e-49ed-935e-2a9c966383cd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9108d5ec-af3e-49ed-935e-2a9c966383cd"}]} [ 756.589195] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 756.608632] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 756.609341] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.617323] env[68569]: DEBUG oslo_vmware.api [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 
tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Task: {'id': task-3166877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218895} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.617649] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.617840] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 756.618045] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.618225] env[68569]: INFO nova.compute.manager [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Took 1.63 seconds to destroy the instance on the hypervisor. [ 756.618466] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.618776] env[68569]: DEBUG nova.compute.manager [-] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 756.618776] env[68569]: DEBUG nova.network.neutron [-] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 756.622029] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 756.643123] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 756.687458] env[68569]: DEBUG nova.network.neutron [-] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.792602] env[68569]: DEBUG oslo_concurrency.lockutils [req-2f2aa9e8-ed1a-4e24-8ddd-6a792afc4331 req-81d9cd7a-7816-467f-b38a-c5537857d06c service nova] Releasing lock "refresh_cache-b40c9dec-cebc-4d23-8df4-96e804333706" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.794319] env[68569]: INFO nova.compute.manager [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Took 47.98 seconds to build instance. 
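The ERROR at [ 756.571265] is Placement's optimistic concurrency doing its job: the inventory PUT carried a stale resource-provider generation, Placement answered 409 placement.concurrent_update, and the report client refreshed inventories, aggregates and traits before retrying, as the entries that follow show. A generic sketch of that refresh-and-retry loop against the Placement REST API; the use of requests, the header handling and the error handling are simplified assumptions, not nova's report client:

    # Generic sketch of retrying an inventory update after a Placement 409
    # generation conflict, like the report client does above. Auth headers and
    # microversion negotiation are omitted; this is not nova's implementation.
    import requests

    def put_inventories(base_url, headers, rp_uuid, inventories, retries=3):
        url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            # Refresh the provider's current generation ...
            current = requests.get(url, headers=headers).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            # ... and attempt the generation-checked update.
            resp = requests.put(url, json=payload, headers=headers)
            if resp.status_code != 409:
                return resp
            # 409 placement.concurrent_update: another writer bumped the
            # generation first, so loop and refresh again.
        raise RuntimeError("inventory update kept hitting generation conflicts")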
[ 756.938971] env[68569]: DEBUG nova.compute.manager [req-47626fa7-08ee-4100-aec5-fe6360726b18 req-6e15c6b7-fe9a-4fe5-aacc-c5f3b34f762f service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Received event network-vif-plugged-519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 756.939286] env[68569]: DEBUG oslo_concurrency.lockutils [req-47626fa7-08ee-4100-aec5-fe6360726b18 req-6e15c6b7-fe9a-4fe5-aacc-c5f3b34f762f service nova] Acquiring lock "64146253-16ab-4d95-83c9-31b74014a040-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.939515] env[68569]: DEBUG oslo_concurrency.lockutils [req-47626fa7-08ee-4100-aec5-fe6360726b18 req-6e15c6b7-fe9a-4fe5-aacc-c5f3b34f762f service nova] Lock "64146253-16ab-4d95-83c9-31b74014a040-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.939559] env[68569]: DEBUG oslo_concurrency.lockutils [req-47626fa7-08ee-4100-aec5-fe6360726b18 req-6e15c6b7-fe9a-4fe5-aacc-c5f3b34f762f service nova] Lock "64146253-16ab-4d95-83c9-31b74014a040-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.939748] env[68569]: DEBUG nova.compute.manager [req-47626fa7-08ee-4100-aec5-fe6360726b18 req-6e15c6b7-fe9a-4fe5-aacc-c5f3b34f762f service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] No waiting events found dispatching network-vif-plugged-519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 756.939911] env[68569]: WARNING nova.compute.manager [req-47626fa7-08ee-4100-aec5-fe6360726b18 req-6e15c6b7-fe9a-4fe5-aacc-c5f3b34f762f service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Received unexpected event network-vif-plugged-519d4e35-86dc-41ad-877e-d3d8f1145ccb for instance with vm_state building and task_state spawning. [ 756.961521] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Successfully updated port: 519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 756.982137] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52750117-6c06-2154-353b-1f8f3f8ba896, 'name': SearchDatastore_Task, 'duration_secs': 0.022145} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.985239] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99ac7ac2-db58-40e3-8d03-c73d181b3946 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.992608] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 756.992608] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526e985c-b7fa-20c9-d556-4a34cc3f81b3" [ 756.992608] env[68569]: _type = "Task" [ 756.992608] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.011748] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526e985c-b7fa-20c9-d556-4a34cc3f81b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.057222] env[68569]: DEBUG oslo_vmware.api [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166878, 'name': PowerOffVM_Task, 'duration_secs': 0.484415} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.060212] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 757.060212] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 757.060505] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-828ad04c-85eb-456b-a1bb-03fc48daf720 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.131060] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 757.131288] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 757.131469] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 
tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Deleting the datastore file [datastore1] 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 757.131773] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-288cf610-6d87-4ee8-848c-07bc77685e8f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.144084] env[68569]: DEBUG oslo_vmware.api [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for the task: (returnval){ [ 757.144084] env[68569]: value = "task-3166880" [ 757.144084] env[68569]: _type = "Task" [ 757.144084] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.152427] env[68569]: DEBUG oslo_vmware.api [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.191254] env[68569]: INFO nova.compute.manager [-] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Took 1.35 seconds to deallocate network for instance. [ 757.227389] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96638535-2aaa-41c9-bebf-c2cec744f4af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.237500] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Doing hard reboot of VM {{(pid=68569) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 757.238428] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-7f4de8e8-fc41-4081-98b9-6be0e3e24bd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.246110] env[68569]: DEBUG oslo_vmware.api [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 757.246110] env[68569]: value = "task-3166881" [ 757.246110] env[68569]: _type = "Task" [ 757.246110] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.259465] env[68569]: DEBUG oslo_vmware.api [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166881, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.287303] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9881cb28-e658-41b2-a592-b3df44a65488 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.294768] env[68569]: DEBUG oslo_concurrency.lockutils [None req-079ee99e-f7f0-4f85-af23-e7f7e5161dad tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.184s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.296787] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915bb454-c203-49ec-81bb-39b3e7bac852 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.346194] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e2488f-fe11-4351-bfb6-620a66d52012 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.354852] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde94cae-f325-4b9b-8e74-df8344dfe7ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.370835] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 757.473307] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "refresh_cache-64146253-16ab-4d95-83c9-31b74014a040" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.473307] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "refresh_cache-64146253-16ab-4d95-83c9-31b74014a040" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.473307] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Building network info cache for instance {{(pid=68569) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 757.503274] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526e985c-b7fa-20c9-d556-4a34cc3f81b3, 'name': SearchDatastore_Task, 'duration_secs': 0.031179} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.504100] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.504100] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] b40c9dec-cebc-4d23-8df4-96e804333706/b40c9dec-cebc-4d23-8df4-96e804333706.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.504100] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc1ef0af-ca58-43c7-883d-356b9afe3ee9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.511015] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 757.511015] env[68569]: value = "task-3166882" [ 757.511015] env[68569]: _type = "Task" [ 757.511015] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.519567] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166882, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.525720] env[68569]: DEBUG nova.compute.manager [req-ebf56a7f-8e91-47ec-a24f-16cea13fe2a1 req-3cfb5d5c-33ca-4b8c-b1c3-d1b0825f2ecd service nova] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Received event network-vif-deleted-1578930e-ed30-4f23-9d6a-55f02a439b25 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 757.525953] env[68569]: DEBUG nova.compute.manager [req-ebf56a7f-8e91-47ec-a24f-16cea13fe2a1 req-3cfb5d5c-33ca-4b8c-b1c3-d1b0825f2ecd service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Received event network-vif-deleted-17651803-ec04-4913-b621-e685c574de0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 757.526128] env[68569]: INFO nova.compute.manager [req-ebf56a7f-8e91-47ec-a24f-16cea13fe2a1 req-3cfb5d5c-33ca-4b8c-b1c3-d1b0825f2ecd service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Neutron deleted interface 17651803-ec04-4913-b621-e685c574de0c; detaching it from the instance and deleting it from the info cache [ 757.526291] env[68569]: DEBUG nova.network.neutron [req-ebf56a7f-8e91-47ec-a24f-16cea13fe2a1 req-3cfb5d5c-33ca-4b8c-b1c3-d1b0825f2ecd service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.656485] env[68569]: DEBUG oslo_vmware.api [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Task: {'id': task-3166880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235685} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.657088] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.657088] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.657088] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.657252] env[68569]: INFO nova.compute.manager [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 757.657492] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.658045] env[68569]: DEBUG nova.compute.manager [-] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.658147] env[68569]: DEBUG nova.network.neutron [-] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.700845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.724270] env[68569]: DEBUG nova.network.neutron [-] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.757342] env[68569]: DEBUG oslo_vmware.api [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166881, 'name': ResetVM_Task, 'duration_secs': 0.115248} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.757647] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Did hard reboot of VM {{(pid=68569) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 757.757868] env[68569]: DEBUG nova.compute.manager [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.758801] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0b8447-b955-41c5-af10-a6ac88b6b846 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.809718] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.926350] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 757.926350] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 73 to 74 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 757.926350] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 758.023276] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166882, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.029441] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.031874] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c9a7df7-341c-4c2b-acf3-1dea044d3672 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.043286] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436d3777-af30-4385-bdd2-875703258bda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.076680] env[68569]: DEBUG nova.compute.manager [req-ebf56a7f-8e91-47ec-a24f-16cea13fe2a1 req-3cfb5d5c-33ca-4b8c-b1c3-d1b0825f2ecd service nova] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Detach interface failed, port_id=17651803-ec04-4913-b621-e685c574de0c, reason: Instance ad207187-634f-4e7f-9809-eb3f742ddeec could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 758.181784] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Updating instance_info_cache with network_info: [{"id": "519d4e35-86dc-41ad-877e-d3d8f1145ccb", "address": "fa:16:3e:fd:1a:15", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap519d4e35-86", "ovs_interfaceid": "519d4e35-86dc-41ad-877e-d3d8f1145ccb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.227460] env[68569]: INFO nova.compute.manager [-] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Took 1.61 seconds to deallocate network for instance. 
[ 758.274024] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b7b668d-2918-45f9-9090-4892834f6bc6 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.822s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.333249] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.432817] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.087s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.432817] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.434954] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.155s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.435220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.439546] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.998s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.439773] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.441648] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.571s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.443110] env[68569]: INFO nova.compute.claims [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.473326] env[68569]: INFO nova.scheduler.client.report [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Deleted allocations for instance 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c [ 758.474974] env[68569]: INFO nova.scheduler.client.report [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Deleted allocations for instance 6606b921-4f3a-44f5-ae4e-c600f26876fc [ 758.524591] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166882, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537445} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.525865] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] b40c9dec-cebc-4d23-8df4-96e804333706/b40c9dec-cebc-4d23-8df4-96e804333706.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.525865] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.526063] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee29bfa2-7aab-46aa-aad3-9f5796823852 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.528575] env[68569]: DEBUG nova.network.neutron [-] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.534679] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 758.534679] env[68569]: value = "task-3166883" [ 758.534679] env[68569]: _type = "Task" [ 758.534679] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.542022] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166883, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.646727] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.647102] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.647324] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.647504] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.647680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.649987] env[68569]: INFO nova.compute.manager [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Terminating instance [ 758.685568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "refresh_cache-64146253-16ab-4d95-83c9-31b74014a040" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.685568] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 
tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Instance network_info: |[{"id": "519d4e35-86dc-41ad-877e-d3d8f1145ccb", "address": "fa:16:3e:fd:1a:15", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap519d4e35-86", "ovs_interfaceid": "519d4e35-86dc-41ad-877e-d3d8f1145ccb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 758.685829] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:1a:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '519d4e35-86dc-41ad-877e-d3d8f1145ccb', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.693914] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Creating folder: Project (061d8bd706114545b34e9e0e6226d700). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.694784] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd464f8f-dc49-4591-b20a-11cb63b1ccd0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.704970] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Created folder: Project (061d8bd706114545b34e9e0e6226d700) in parent group-v633430. [ 758.705205] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Creating folder: Instances. Parent ref: group-v633562. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.705522] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c931a635-ac26-476e-99ed-bb2fa39119a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.714645] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Created folder: Instances in parent group-v633562. [ 758.714992] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 758.715284] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.715713] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a10d80a-7721-472c-9081-46895062fc98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.742392] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.747620] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.747620] env[68569]: value = "task-3166886" [ 758.747620] env[68569]: _type = "Task" [ 758.747620] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.760451] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166886, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.950510] env[68569]: DEBUG nova.compute.utils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 758.950976] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 758.950976] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.966728] env[68569]: DEBUG nova.compute.manager [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Received event network-changed-519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 758.966975] env[68569]: DEBUG nova.compute.manager [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Refreshing instance network info cache due to event network-changed-519d4e35-86dc-41ad-877e-d3d8f1145ccb. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 758.967231] env[68569]: DEBUG oslo_concurrency.lockutils [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] Acquiring lock "refresh_cache-64146253-16ab-4d95-83c9-31b74014a040" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.967406] env[68569]: DEBUG oslo_concurrency.lockutils [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] Acquired lock "refresh_cache-64146253-16ab-4d95-83c9-31b74014a040" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.968107] env[68569]: DEBUG nova.network.neutron [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Refreshing network info cache for port 519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.985771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-efa87a53-2885-4fd8-8242-87f0b3c3c6c0 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "6606b921-4f3a-44f5-ae4e-c600f26876fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.158s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.986873] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ba6d586-d642-427e-9be8-649b498023ec tempest-InstanceActionsV221TestJSON-1680468374 tempest-InstanceActionsV221TestJSON-1680468374-project-member] Lock "2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.471s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.031883] env[68569]: INFO nova.compute.manager [-] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Took 1.37 seconds to deallocate network for instance. 
[ 759.047775] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166883, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068752} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.048103] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.048907] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de50985d-cd13-4e39-ac6b-adea4c457762 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.073117] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] b40c9dec-cebc-4d23-8df4-96e804333706/b40c9dec-cebc-4d23-8df4-96e804333706.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.074215] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a283fa5-c499-4704-ac9e-6875bb7a5c74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.092633] env[68569]: DEBUG nova.policy [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17242adad46e4c188e337060656e51c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '061d8bd706114545b34e9e0e6226d700', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 759.100497] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 759.100497] env[68569]: value = "task-3166887" [ 759.100497] env[68569]: _type = "Task" [ 759.100497] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.110156] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166887, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.157270] env[68569]: DEBUG nova.compute.manager [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 759.157270] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.157270] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfdef8d-0752-4944-9a76-47fe86fe943b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.166173] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 759.166173] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e797be2-1f3f-4239-b2de-f4880e49350e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.175167] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 759.175167] env[68569]: value = "task-3166888" [ 759.175167] env[68569]: _type = "Task" [ 759.175167] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.182585] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166888, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.261745] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166886, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.459168] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.463216] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Successfully created port: cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.540151] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.619824] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166887, 'name': ReconfigVM_Task, 'duration_secs': 0.499263} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.622870] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Reconfigured VM instance instance-0000002b to attach disk [datastore2] b40c9dec-cebc-4d23-8df4-96e804333706/b40c9dec-cebc-4d23-8df4-96e804333706.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.625613] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-618ea4c7-5d62-43cd-9d45-b02cf68bb44a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.632625] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 759.632625] env[68569]: value = "task-3166889" [ 759.632625] env[68569]: _type = "Task" [ 759.632625] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.642960] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166889, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.680310] env[68569]: DEBUG nova.compute.manager [req-8146eaa2-c18d-4c92-b009-234c5e9b0848 req-7491f743-7bb9-441e-b84d-e720467e7516 service nova] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Received event network-vif-deleted-a41c265a-33e8-416e-bf15-87c720e60d25 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 759.690201] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166888, 'name': PowerOffVM_Task, 'duration_secs': 0.202183} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.690201] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 759.690201] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 759.690201] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e139fb2-f761-47e1-a800-121980f7baea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.755364] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 759.755588] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 759.755766] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Deleting the datastore file [datastore2] 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 759.758610] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa4b89ac-2185-43c5-a264-c85f29700459 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.766711] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166886, 'name': CreateVM_Task, 'duration_secs': 0.559673} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.767205] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.767891] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.768169] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.768489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 759.769099] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d3eae0c-37d7-475c-9aad-d4fea98dc69f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.772739] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for the task: (returnval){ [ 759.772739] env[68569]: value = "task-3166891" [ 759.772739] env[68569]: _type = "Task" [ 759.772739] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.780606] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 759.780606] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526cfe89-5530-c0f8-da64-9f5bd365f515" [ 759.780606] env[68569]: _type = "Task" [ 759.780606] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.787972] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166891, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.797122] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526cfe89-5530-c0f8-da64-9f5bd365f515, 'name': SearchDatastore_Task, 'duration_secs': 0.010322} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.797122] env[68569]: DEBUG nova.network.neutron [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Updated VIF entry in instance network info cache for port 519d4e35-86dc-41ad-877e-d3d8f1145ccb. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.797241] env[68569]: DEBUG nova.network.neutron [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Updating instance_info_cache with network_info: [{"id": "519d4e35-86dc-41ad-877e-d3d8f1145ccb", "address": "fa:16:3e:fd:1a:15", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap519d4e35-86", "ovs_interfaceid": "519d4e35-86dc-41ad-877e-d3d8f1145ccb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.798665] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.798918] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.799168] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock 
"[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.799315] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.799489] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.800158] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04b1c5ce-ba12-4866-a590-79dbceba4638 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.819519] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.819519] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.819519] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4249b7ac-f7cf-4d44-9a54-cab13b6c93cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.829344] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 759.829344] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b1ea6-61b7-bc42-26f5-c63242fd04dc" [ 759.829344] env[68569]: _type = "Task" [ 759.829344] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.841511] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b1ea6-61b7-bc42-26f5-c63242fd04dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.021104] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fd4a03-e0c3-47d4-bed0-585fb1529fd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.029031] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e998bf-75b4-4f48-bf52-d895ef911e30 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.063137] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca0e6c4-fe6e-4f72-a6d7-587d068c6716 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.070026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "39a84212-2e52-4dba-b00c-5689564deaf4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.070281] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "39a84212-2e52-4dba-b00c-5689564deaf4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.070479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "39a84212-2e52-4dba-b00c-5689564deaf4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.070670] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "39a84212-2e52-4dba-b00c-5689564deaf4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.070848] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "39a84212-2e52-4dba-b00c-5689564deaf4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.073454] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0028f08-9597-49b9-823c-4b0ede3486df {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.078020] env[68569]: INFO nova.compute.manager [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Terminating instance [ 760.093108] env[68569]: DEBUG nova.compute.provider_tree [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.095284] env[68569]: DEBUG nova.compute.manager [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 760.095527] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.096341] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1495bf9c-a718-48a0-8cd9-87d0604ee809 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.103642] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 760.104100] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99ba43f3-0a3b-4cbc-b8b4-0dc66c4ef2d8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.112059] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 760.112059] env[68569]: value = "task-3166892" [ 760.112059] env[68569]: _type = "Task" [ 760.112059] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.120112] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.143948] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166889, 'name': Rename_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.285177] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.300938] env[68569]: DEBUG oslo_concurrency.lockutils [req-7bc5a91e-38c5-4be1-a7c9-3b786f8d146f req-8c992dfe-1ce9-4c76-a6ba-9e9540b965c5 service nova] Releasing lock "refresh_cache-64146253-16ab-4d95-83c9-31b74014a040" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.342069] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b1ea6-61b7-bc42-26f5-c63242fd04dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011606} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.342862] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f235823e-657d-4a27-870b-71c85e7fcb2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.348141] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 760.348141] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521e75a0-6a42-d8f4-d7b0-9d0c8721d8f1" [ 760.348141] env[68569]: _type = "Task" [ 760.348141] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.356030] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521e75a0-6a42-d8f4-d7b0-9d0c8721d8f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.469464] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.498560] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.498793] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.498942] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.499133] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.499270] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.499409] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.499607] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.499815] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 760.500038] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.500208] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.500378] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.501301] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee807e3-1d1a-4d1d-9f42-db28a3998ec1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.509799] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35934c24-8afc-448f-a95e-503b0fa6064a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.600568] env[68569]: DEBUG nova.scheduler.client.report [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.623418] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.643525] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166889, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.783488] env[68569]: DEBUG oslo_vmware.api [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Task: {'id': task-3166891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.52481} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.783833] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 760.785761] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 760.785990] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 760.786193] env[68569]: INFO nova.compute.manager [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Took 1.63 seconds to destroy the instance on the hypervisor. [ 760.786441] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 760.786636] env[68569]: DEBUG nova.compute.manager [-] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 760.786731] env[68569]: DEBUG nova.network.neutron [-] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 760.858501] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521e75a0-6a42-d8f4-d7b0-9d0c8721d8f1, 'name': SearchDatastore_Task, 'duration_secs': 0.023939} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.858885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.859190] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 64146253-16ab-4d95-83c9-31b74014a040/64146253-16ab-4d95-83c9-31b74014a040.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.859464] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53f4951a-1bd2-4dc5-87f3-ddc7dafee9de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.865962] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 760.865962] env[68569]: value = "task-3166893" [ 760.865962] env[68569]: _type = "Task" [ 760.865962] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.873871] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.109316] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.109316] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 761.110901] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.897s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.111314] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.113936] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.412s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.116359] env[68569]: INFO nova.compute.claims [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.135998] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166892, 'name': PowerOffVM_Task, 'duration_secs': 0.974582} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.141526] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 761.141714] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 761.142653] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcee8bfe-62a5-45db-a509-09d68b108b5e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.148712] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166889, 'name': Rename_Task, 'duration_secs': 1.290752} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.148756] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.148993] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9288033d-2529-4500-9c54-9083463f71ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.158243] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 761.158243] env[68569]: value = "task-3166895" [ 761.158243] env[68569]: _type = "Task" [ 761.158243] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.167206] env[68569]: INFO nova.scheduler.client.report [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Deleted allocations for instance e77cc179-1f3d-4095-a491-48df7f79bdb9 [ 761.175977] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.219099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 761.219099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 761.219099] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Deleting the datastore file [datastore1] 39a84212-2e52-4dba-b00c-5689564deaf4 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 761.219331] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-332ecb45-70fb-4d92-8010-0f0532637450 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.226402] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for the task: (returnval){ [ 761.226402] env[68569]: value = 
"task-3166896" [ 761.226402] env[68569]: _type = "Task" [ 761.226402] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.236164] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.260172] env[68569]: DEBUG nova.compute.manager [req-395acf57-8f5d-4854-b36b-6ac911970172 req-cfb36499-bfbe-4e30-a0e9-a43c1a09133a service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Received event network-vif-plugged-cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 761.260393] env[68569]: DEBUG oslo_concurrency.lockutils [req-395acf57-8f5d-4854-b36b-6ac911970172 req-cfb36499-bfbe-4e30-a0e9-a43c1a09133a service nova] Acquiring lock "c634f7eb-2f71-473d-8f90-71d74edffecb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.260609] env[68569]: DEBUG oslo_concurrency.lockutils [req-395acf57-8f5d-4854-b36b-6ac911970172 req-cfb36499-bfbe-4e30-a0e9-a43c1a09133a service nova] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.260790] env[68569]: DEBUG oslo_concurrency.lockutils [req-395acf57-8f5d-4854-b36b-6ac911970172 req-cfb36499-bfbe-4e30-a0e9-a43c1a09133a service nova] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.260951] env[68569]: DEBUG nova.compute.manager [req-395acf57-8f5d-4854-b36b-6ac911970172 req-cfb36499-bfbe-4e30-a0e9-a43c1a09133a service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] No waiting events found dispatching network-vif-plugged-cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.261277] env[68569]: WARNING nova.compute.manager [req-395acf57-8f5d-4854-b36b-6ac911970172 req-cfb36499-bfbe-4e30-a0e9-a43c1a09133a service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Received unexpected event network-vif-plugged-cdf95c3f-e426-4563-995a-6fcfe2ffa912 for instance with vm_state building and task_state spawning. [ 761.378352] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166893, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.384916] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Successfully updated port: cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.589662] env[68569]: DEBUG nova.network.neutron [-] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.625456] env[68569]: DEBUG nova.compute.utils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.629333] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.629333] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.670940] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166895, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.685865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-748bb606-f608-4fb5-8cb6-d75eb6f31313 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "e77cc179-1f3d-4095-a491-48df7f79bdb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.303s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.703030] env[68569]: DEBUG nova.policy [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17242adad46e4c188e337060656e51c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '061d8bd706114545b34e9e0e6226d700', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.710826] env[68569]: DEBUG nova.compute.manager [req-a4d334cf-2606-470e-a6c9-2508b800135b req-01dd0f34-6894-4dfd-8678-870896f9948f service nova] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Received event network-vif-deleted-f51da7b0-c063-444e-9ff2-3b08c597f70c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 761.735896] env[68569]: DEBUG oslo_vmware.api [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Task: {'id': task-3166896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.433189} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.736218] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.736394] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 761.736567] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 761.736732] env[68569]: INFO nova.compute.manager [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 761.736968] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 761.737444] env[68569]: DEBUG nova.compute.manager [-] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 761.737543] env[68569]: DEBUG nova.network.neutron [-] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.885097] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675417} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.887507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "refresh_cache-c634f7eb-2f71-473d-8f90-71d74edffecb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.887507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "refresh_cache-c634f7eb-2f71-473d-8f90-71d74edffecb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.887872] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.888847] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 64146253-16ab-4d95-83c9-31b74014a040/64146253-16ab-4d95-83c9-31b74014a040.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.889118] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.889381] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with 
opID=oslo.vmware-ac26e7ee-294e-4849-b5de-9e8c81387fe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.896610] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 761.896610] env[68569]: value = "task-3166897" [ 761.896610] env[68569]: _type = "Task" [ 761.896610] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.908324] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166897, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.092162] env[68569]: INFO nova.compute.manager [-] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Took 1.31 seconds to deallocate network for instance. [ 762.129886] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 762.181287] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166895, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.307724] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Successfully created port: d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.409255] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166897, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108391} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.410026] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 762.410655] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f707af-9467-4843-bb1d-f7e2b3c1a411 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.442379] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 64146253-16ab-4d95-83c9-31b74014a040/64146253-16ab-4d95-83c9-31b74014a040.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 762.445473] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9860e6c6-9c8a-4d88-88ff-f6842302a214 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.468356] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 762.468356] env[68569]: value = "task-3166898" [ 762.468356] env[68569]: _type = "Task" [ 762.468356] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.477848] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166898, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.481631] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.503337] env[68569]: DEBUG nova.network.neutron [-] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.599275] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.675655] env[68569]: DEBUG oslo_vmware.api [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3166895, 'name': PowerOnVM_Task, 'duration_secs': 1.151235} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.679024] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.679808] env[68569]: INFO nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Took 10.24 seconds to spawn the instance on the hypervisor. 
[ 762.679808] env[68569]: DEBUG nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.683659] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57cc617-d471-45a2-9cb9-140836e0742e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.734402] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Updating instance_info_cache with network_info: [{"id": "cdf95c3f-e426-4563-995a-6fcfe2ffa912", "address": "fa:16:3e:26:96:71", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdf95c3f-e4", "ovs_interfaceid": "cdf95c3f-e426-4563-995a-6fcfe2ffa912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.762554] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59b9c6f-5b42-4058-ae62-8580d86be30d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.772690] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db43d88-8c62-4008-bc75-04f0ee7db257 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.808258] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6778ab-7bf3-4e36-91b5-5046670b321b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.815634] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1771d12e-36e9-4b8f-b20e-2f42dfe33c60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.830669] env[68569]: DEBUG nova.compute.provider_tree [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Inventory has not changed in ProviderTree for provider: 
a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.978610] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.008620] env[68569]: INFO nova.compute.manager [-] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Took 1.27 seconds to deallocate network for instance. [ 763.144407] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 763.172209] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.172496] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.172595] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.172792] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.172941] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.173141] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 
tempest-ListServersNegativeTestJSON-1252951075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.173359] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.173510] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.173665] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.173817] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.174034] env[68569]: DEBUG nova.virt.hardware [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.174911] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155c47ec-8809-42fb-82fa-3fc432df4df1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.183447] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf19dfda-5a26-48ff-9294-1b28b954a8a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.209186] env[68569]: INFO nova.compute.manager [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Took 49.51 seconds to build instance. 
[ 763.239153] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "refresh_cache-c634f7eb-2f71-473d-8f90-71d74edffecb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.239153] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Instance network_info: |[{"id": "cdf95c3f-e426-4563-995a-6fcfe2ffa912", "address": "fa:16:3e:26:96:71", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdf95c3f-e4", "ovs_interfaceid": "cdf95c3f-e426-4563-995a-6fcfe2ffa912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 763.239487] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:96:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdf95c3f-e426-4563-995a-6fcfe2ffa912', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.249285] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.249285] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.249285] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16e8b4e5-ac57-4792-a593-90165d0e5f13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.270055] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.270055] env[68569]: value = "task-3166899" [ 763.270055] env[68569]: _type = "Task" [ 763.270055] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.278890] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166899, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.298188] env[68569]: DEBUG nova.compute.manager [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Received event network-changed-cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 763.298188] env[68569]: DEBUG nova.compute.manager [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Refreshing instance network info cache due to event network-changed-cdf95c3f-e426-4563-995a-6fcfe2ffa912. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 763.298188] env[68569]: DEBUG oslo_concurrency.lockutils [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] Acquiring lock "refresh_cache-c634f7eb-2f71-473d-8f90-71d74edffecb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.298188] env[68569]: DEBUG oslo_concurrency.lockutils [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] Acquired lock "refresh_cache-c634f7eb-2f71-473d-8f90-71d74edffecb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.298827] env[68569]: DEBUG nova.network.neutron [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Refreshing network info cache for port cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.334512] env[68569]: DEBUG nova.scheduler.client.report [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.482913] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166898, 'name': ReconfigVM_Task, 'duration_secs': 0.938348} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.485308] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 64146253-16ab-4d95-83c9-31b74014a040/64146253-16ab-4d95-83c9-31b74014a040.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 763.485308] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afc7b570-3913-46bc-8f79-1184ce76f27a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.491311] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 763.491311] env[68569]: value = "task-3166900" [ 763.491311] env[68569]: _type = "Task" [ 763.491311] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.500434] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166900, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.521396] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.710964] env[68569]: DEBUG oslo_concurrency.lockutils [None req-80447c80-d715-4f94-ae54-701dd1d977b2 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b40c9dec-cebc-4d23-8df4-96e804333706" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 108.102s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.785896] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166899, 'name': CreateVM_Task, 'duration_secs': 0.360794} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.786135] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.787471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.787544] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.787858] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 763.788136] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a655714-04c5-4e25-981c-4a598a66a390 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.796400] env[68569]: DEBUG nova.compute.manager [req-489cd254-348b-4afb-afc2-6f0a032ef898 req-375438e7-f8ce-496b-89c1-eb0632242de7 service nova] [instance: 
39a84212-2e52-4dba-b00c-5689564deaf4] Received event network-vif-deleted-9b282c4e-170f-4f30-8c96-1a9b64168c47 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 763.796790] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 763.796790] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1aa15-2275-7262-b24a-fb41a884153b" [ 763.796790] env[68569]: _type = "Task" [ 763.796790] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.806662] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1aa15-2275-7262-b24a-fb41a884153b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.843776] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.844304] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.847946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.648s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.848165] env[68569]: DEBUG nova.objects.instance [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lazy-loading 'resources' on Instance uuid 7c887df0-4358-46c5-9682-0d4122e96d10 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.009107] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166900, 'name': Rename_Task, 'duration_secs': 0.171763} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.009483] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 764.009740] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fb409d5-14a2-4195-9e7c-147cd4f5d4a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.017035] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 764.017035] env[68569]: value = "task-3166901" [ 764.017035] env[68569]: _type = "Task" [ 764.017035] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.027358] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.214048] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.308933] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1aa15-2275-7262-b24a-fb41a884153b, 'name': SearchDatastore_Task, 'duration_secs': 0.026646} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.309493] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.309928] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.310305] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.310601] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.310969] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.311921] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52199e0f-1c66-4c59-b047-83dd30384145 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.323252] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.323252] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.323252] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfecbf49-1dff-4053-aa2c-b1f6fb9b0d59 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.333045] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 764.333045] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b1ce82-70f3-9608-9552-ef60c12d2e70" [ 764.333045] env[68569]: _type = "Task" [ 764.333045] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.342946] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b1ce82-70f3-9608-9552-ef60c12d2e70, 'name': SearchDatastore_Task, 'duration_secs': 0.011194} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.344410] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6078665a-e193-4d6f-9f2d-2f06429b3266 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.351443] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 764.351443] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b3935d-15f1-f8c5-21bc-57ad8d42f9d4" [ 764.351443] env[68569]: _type = "Task" [ 764.351443] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.359339] env[68569]: DEBUG nova.compute.utils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 764.366525] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 764.366525] env[68569]: DEBUG nova.network.neutron [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.371494] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b3935d-15f1-f8c5-21bc-57ad8d42f9d4, 'name': SearchDatastore_Task, 'duration_secs': 0.0091} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.372545] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.372881] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c634f7eb-2f71-473d-8f90-71d74edffecb/c634f7eb-2f71-473d-8f90-71d74edffecb.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.373181] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a5890f1-845f-4e30-b863-6b0ae8af0486 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.383249] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 764.383249] env[68569]: value = "task-3166902" [ 764.383249] env[68569]: _type = "Task" [ 764.383249] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.396595] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166902, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.428060] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Successfully updated port: d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.482351] env[68569]: DEBUG nova.policy [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05dba3760f454212b40e95dcd8b3a711', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54ed048337034227adac0c0a9fa64639', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 764.487965] env[68569]: DEBUG nova.network.neutron [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Updated VIF entry in instance network info cache for port cdf95c3f-e426-4563-995a-6fcfe2ffa912. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 764.488336] env[68569]: DEBUG nova.network.neutron [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Updating instance_info_cache with network_info: [{"id": "cdf95c3f-e426-4563-995a-6fcfe2ffa912", "address": "fa:16:3e:26:96:71", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdf95c3f-e4", "ovs_interfaceid": "cdf95c3f-e426-4563-995a-6fcfe2ffa912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.530305] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166901, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.738500] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.869379] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 764.895343] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166902, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.938319] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "refresh_cache-ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.938379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "refresh_cache-ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.939011] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.976060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a917c7ce-364a-4004-bf2d-20c5ba11b528 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.984666] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df3bbd1-49cb-445a-a452-9e8e91706b6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.994696] env[68569]: DEBUG oslo_concurrency.lockutils [req-db7fbf5c-5a14-4128-b2c7-5695bf9ab240 req-d76e7423-20c2-4faa-bbff-282a4c4e855d service nova] Releasing lock "refresh_cache-c634f7eb-2f71-473d-8f90-71d74edffecb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.024167] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e74803-f5c4-486a-bc09-a2416aeee371 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.032682] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166901, 'name': PowerOnVM_Task, 'duration_secs': 0.602532} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.036613] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 765.036613] env[68569]: INFO nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Took 8.66 seconds to spawn the instance on the hypervisor. [ 765.036613] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 765.036613] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33fe0a7-722d-4e0f-9f7d-420e4d0e8454 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.040077] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34fa3c7-9387-43a9-a79e-893dfde44089 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.058557] env[68569]: DEBUG nova.compute.provider_tree [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.233053] env[68569]: DEBUG nova.network.neutron [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Successfully created port: ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.396115] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166902, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661758} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.397150] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c634f7eb-2f71-473d-8f90-71d74edffecb/c634f7eb-2f71-473d-8f90-71d74edffecb.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.397150] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.404460] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22fc5c1c-5c48-4a1e-9570-d6ee81b82725 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.417020] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 765.417020] env[68569]: value = "task-3166903" [ 765.417020] env[68569]: _type = "Task" [ 765.417020] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.430358] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166903, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.516379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "b770fbd1-579a-4e3e-a5c9-9f030695f057" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.516581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.531921] env[68569]: DEBUG nova.compute.manager [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Received event network-vif-plugged-d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 765.533172] env[68569]: DEBUG oslo_concurrency.lockutils [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] Acquiring lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.533172] env[68569]: DEBUG oslo_concurrency.lockutils [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.533172] env[68569]: DEBUG oslo_concurrency.lockutils [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.533172] env[68569]: DEBUG nova.compute.manager [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] No waiting events found dispatching network-vif-plugged-d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.533172] env[68569]: WARNING nova.compute.manager [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Received unexpected event network-vif-plugged-d682f08f-b00c-4ab2-bfd2-ffe374890b52 for instance with vm_state building and task_state spawning. 
[ 765.533945] env[68569]: DEBUG nova.compute.manager [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Received event network-changed-d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 765.534172] env[68569]: DEBUG nova.compute.manager [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Refreshing instance network info cache due to event network-changed-d682f08f-b00c-4ab2-bfd2-ffe374890b52. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 765.534361] env[68569]: DEBUG oslo_concurrency.lockutils [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] Acquiring lock "refresh_cache-ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.541404] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.565585] env[68569]: DEBUG nova.scheduler.client.report [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.579724] env[68569]: INFO nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Took 50.76 seconds to build instance. [ 765.886142] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.916433] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.916677] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.916830] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.917015] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.917352] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.917913] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.918225] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.918434] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.918637] env[68569]: DEBUG 
nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.918841] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.919055] env[68569]: DEBUG nova.virt.hardware [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.920040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c5e943-1297-40dc-9a34-dbeb8f71dfc9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.926259] env[68569]: DEBUG nova.network.neutron [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Updating instance_info_cache with network_info: [{"id": "d682f08f-b00c-4ab2-bfd2-ffe374890b52", "address": "fa:16:3e:a6:1b:f4", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd682f08f-b0", "ovs_interfaceid": "d682f08f-b00c-4ab2-bfd2-ffe374890b52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.938766] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219955} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.940828] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.943404] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddf67a4-10a2-4922-9630-d4b4b0ef6757 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.950282] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc908083-2adc-41cf-83f0-2ec490ca637e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.986314] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] c634f7eb-2f71-473d-8f90-71d74edffecb/c634f7eb-2f71-473d-8f90-71d74edffecb.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.999483] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c263606-ea86-47a5-b276-50682a99d8ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.025798] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 766.025798] env[68569]: value = "task-3166904" [ 766.025798] env[68569]: _type = "Task" [ 766.025798] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.033929] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166904, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.078826] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.231s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.081611] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.708s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.081611] env[68569]: DEBUG nova.objects.instance [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lazy-loading 'resources' on Instance uuid 77b9756e-2299-47e2-a6d8-e8026e33a3de {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 766.083471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "64146253-16ab-4d95-83c9-31b74014a040" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.702s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.113267] env[68569]: INFO nova.scheduler.client.report [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Deleted allocations for instance 7c887df0-4358-46c5-9682-0d4122e96d10 [ 766.433162] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "refresh_cache-ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.433162] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Instance network_info: |[{"id": "d682f08f-b00c-4ab2-bfd2-ffe374890b52", "address": "fa:16:3e:a6:1b:f4", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd682f08f-b0", "ovs_interfaceid": "d682f08f-b00c-4ab2-bfd2-ffe374890b52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 766.433499] env[68569]: DEBUG oslo_concurrency.lockutils [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] Acquired lock "refresh_cache-ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.433499] env[68569]: DEBUG nova.network.neutron [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Refreshing network info cache for port d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.433499] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:1b:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd682f08f-b00c-4ab2-bfd2-ffe374890b52', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.443162] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.444263] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.444494] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a092e2d4-e3be-4ff7-b810-f6a0c90cf809 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.468243] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.468243] env[68569]: value = "task-3166905" [ 766.468243] env[68569]: _type = "Task" [ 766.468243] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.475814] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166905, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.538935] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166904, 'name': ReconfigVM_Task, 'duration_secs': 0.401119} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.540104] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Reconfigured VM instance instance-0000002d to attach disk [datastore1] c634f7eb-2f71-473d-8f90-71d74edffecb/c634f7eb-2f71-473d-8f90-71d74edffecb.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.541036] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7323e924-4c3c-4cf1-95d7-78a653d129b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.548055] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 766.548055] env[68569]: value = "task-3166906" [ 766.548055] env[68569]: _type = "Task" [ 766.548055] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.556378] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166906, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.588955] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 766.622847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8930b841-64d0-4e9b-8178-429501b933c9 tempest-ImagesOneServerTestJSON-1338934721 tempest-ImagesOneServerTestJSON-1338934721-project-member] Lock "7c887df0-4358-46c5-9682-0d4122e96d10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.085s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.780616] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "ab021831-2cc3-4457-aa55-b55036c2a423" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.781130] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "ab021831-2cc3-4457-aa55-b55036c2a423" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.730426] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "b2e6de60-b4e5-4030-bca7-355d17fec06d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.731201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.731201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "b2e6de60-b4e5-4030-bca7-355d17fec06d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.731201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.731366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock 
"b2e6de60-b4e5-4030-bca7-355d17fec06d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.732960] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166905, 'name': CreateVM_Task, 'duration_secs': 0.348168} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.733410] env[68569]: INFO nova.compute.manager [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Terminating instance [ 767.738950] env[68569]: DEBUG nova.network.neutron [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Successfully updated port: ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.741088] env[68569]: DEBUG nova.compute.manager [req-0fe16be3-cd0e-4972-9495-0be73f052f70 req-91a2a91e-13ce-498b-804e-b220db4a1c14 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Received event network-vif-plugged-ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 767.741088] env[68569]: DEBUG oslo_concurrency.lockutils [req-0fe16be3-cd0e-4972-9495-0be73f052f70 req-91a2a91e-13ce-498b-804e-b220db4a1c14 service nova] Acquiring lock "98efdafe-e02b-46ca-a701-b70042513128-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.741218] env[68569]: DEBUG oslo_concurrency.lockutils [req-0fe16be3-cd0e-4972-9495-0be73f052f70 req-91a2a91e-13ce-498b-804e-b220db4a1c14 service nova] Lock "98efdafe-e02b-46ca-a701-b70042513128-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.741867] env[68569]: DEBUG oslo_concurrency.lockutils [req-0fe16be3-cd0e-4972-9495-0be73f052f70 req-91a2a91e-13ce-498b-804e-b220db4a1c14 service nova] Lock "98efdafe-e02b-46ca-a701-b70042513128-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.741867] env[68569]: DEBUG nova.compute.manager [req-0fe16be3-cd0e-4972-9495-0be73f052f70 req-91a2a91e-13ce-498b-804e-b220db4a1c14 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] No waiting events found dispatching network-vif-plugged-ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 767.741867] env[68569]: WARNING nova.compute.manager [req-0fe16be3-cd0e-4972-9495-0be73f052f70 req-91a2a91e-13ce-498b-804e-b220db4a1c14 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Received unexpected event network-vif-plugged-ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb for instance with vm_state building and task_state spawning. 
[ 767.745650] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.749688] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.749850] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.750194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.751603] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-902047da-ff22-4c69-ac15-edd23811574d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.758758] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166906, 'name': Rename_Task, 'duration_secs': 0.158941} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.759309] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.760223] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.760460] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 767.760460] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a85623-bf19-636b-d782-0def37d36728" [ 767.760460] env[68569]: _type = "Task" [ 767.760460] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.761091] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0fbe13e-01ba-4808-9925-435b926b130f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.772871] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 767.772871] env[68569]: value = "task-3166907" [ 767.772871] env[68569]: _type = "Task" [ 767.772871] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.775910] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a85623-bf19-636b-d782-0def37d36728, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.787564] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166907, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.979654] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a84e130-a9dc-4b22-9ae0-10f806e92f6d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.988512] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba36afb-ef7d-48e7-9bd7-40ce370412a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.022544] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942e161e-5f64-4b54-bc4c-2f414abb3e95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.030547] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a835f9fa-5768-4a40-849d-5147b6424c29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.035452] env[68569]: DEBUG nova.network.neutron [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Updated VIF entry in instance network info cache for port d682f08f-b00c-4ab2-bfd2-ffe374890b52. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.035912] env[68569]: DEBUG nova.network.neutron [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Updating instance_info_cache with network_info: [{"id": "d682f08f-b00c-4ab2-bfd2-ffe374890b52", "address": "fa:16:3e:a6:1b:f4", "network": {"id": "9dc67ae7-e219-42f7-90ca-c00b1962ea5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325092138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "061d8bd706114545b34e9e0e6226d700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd682f08f-b0", "ovs_interfaceid": "d682f08f-b00c-4ab2-bfd2-ffe374890b52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.049383] env[68569]: DEBUG nova.compute.provider_tree [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.055171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "123a6895-af16-493a-afce-7ae6c2137422" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.055393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "123a6895-af16-493a-afce-7ae6c2137422" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.249910] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "refresh_cache-98efdafe-e02b-46ca-a701-b70042513128" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.250221] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquired lock 
"refresh_cache-98efdafe-e02b-46ca-a701-b70042513128" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.250315] env[68569]: DEBUG nova.network.neutron [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.251843] env[68569]: DEBUG nova.compute.manager [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 768.252062] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 768.253300] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2a9f0c-ada9-4c35-9eec-e947732072c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.262029] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 768.262698] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e914f0e-302e-4c24-9646-be1a1fa91c9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.273122] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a85623-bf19-636b-d782-0def37d36728, 'name': SearchDatastore_Task, 'duration_secs': 0.018423} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.274373] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.274553] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.275639] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.275639] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.275639] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.275639] env[68569]: DEBUG oslo_vmware.api [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 768.275639] env[68569]: value = "task-3166908" [ 768.275639] env[68569]: _type = "Task" [ 768.275639] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.275639] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd991f5e-67bb-46dd-8aa0-82213359984d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.288361] env[68569]: DEBUG oslo_vmware.api [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166908, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.292994] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166907, 'name': PowerOnVM_Task, 'duration_secs': 0.473788} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.294092] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.294316] env[68569]: INFO nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Took 7.82 seconds to spawn the instance on the hypervisor. [ 768.294395] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.294814] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.294814] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.296329] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08585aa-5895-4417-9d02-320b200ce2fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.298853] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89bc7ef2-0e1d-4688-b8e7-1cd63449d9b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.309522] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 768.309522] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f17ba1-2090-a9e3-3853-b88cd1b8f968" [ 768.309522] env[68569]: _type = "Task" [ 768.309522] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.321666] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f17ba1-2090-a9e3-3853-b88cd1b8f968, 'name': SearchDatastore_Task, 'duration_secs': 0.011056} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.322545] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1321361d-590e-46b9-83bf-8b7dcfbe3b67 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.329854] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 768.329854] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c46512-e8fe-d8d6-7b7d-a75e4cacf843" [ 768.329854] env[68569]: _type = "Task" [ 768.329854] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.338045] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c46512-e8fe-d8d6-7b7d-a75e4cacf843, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.540322] env[68569]: DEBUG oslo_concurrency.lockutils [req-421e9938-713d-45ab-ba27-30b6a983f9a1 req-280e1d17-22bc-4328-ba0e-86076670770c service nova] Releasing lock "refresh_cache-ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.557882] env[68569]: DEBUG nova.scheduler.client.report [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.791644] env[68569]: DEBUG oslo_vmware.api [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166908, 'name': PowerOffVM_Task, 'duration_secs': 0.230394} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.791972] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.792166] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.792914] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a158407-f1a1-4e90-8955-cf974c4abee8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.818384] env[68569]: INFO nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Took 52.48 seconds to build instance. [ 768.847058] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c46512-e8fe-d8d6-7b7d-a75e4cacf843, 'name': SearchDatastore_Task, 'duration_secs': 0.010116} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.847746] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.848229] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae/ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 768.848675] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e7384b8-eb2d-4fa7-9bec-7e6206c2589c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.853778] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 768.854107] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 768.854357] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Deleting the datastore file [datastore1] b2e6de60-b4e5-4030-bca7-355d17fec06d {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 768.855313] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79ec9a73-b2f8-4268-a949-0a469452c8cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.859032] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 768.859032] env[68569]: value = "task-3166910" [ 768.859032] env[68569]: _type = "Task" [ 768.859032] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.863654] env[68569]: DEBUG oslo_vmware.api [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for the task: (returnval){ [ 768.863654] env[68569]: value = "task-3166911" [ 768.863654] env[68569]: _type = "Task" [ 768.863654] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.874496] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.877493] env[68569]: DEBUG oslo_vmware.api [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.049055] env[68569]: DEBUG nova.network.neutron [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.065098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.984s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.072993] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.603s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.078259] env[68569]: INFO nova.compute.claims [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.110737] env[68569]: INFO nova.scheduler.client.report [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Deleted allocations for instance 77b9756e-2299-47e2-a6d8-e8026e33a3de [ 769.254979] env[68569]: DEBUG nova.network.neutron [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Updating instance_info_cache with network_info: [{"id": "ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb", "address": "fa:16:3e:2c:42:65", "network": {"id": "d7061ac1-d48d-405d-93c6-033d5a80c4c6", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-722151018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ed048337034227adac0c0a9fa64639", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a5569c57-a27b-4d4d-a519-784be105114f", "external-id": "nsx-vlan-transportzone-640", "segmentation_id": 640, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba6eee62-7d", "ovs_interfaceid": "ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.323664] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.909s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.375513] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166910, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.379733] env[68569]: DEBUG oslo_vmware.api [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Task: {'id': task-3166911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199678} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.380288] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 769.380383] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 769.380602] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.380814] env[68569]: INFO nova.compute.manager [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 769.381202] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.381511] env[68569]: DEBUG nova.compute.manager [-] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 769.381676] env[68569]: DEBUG nova.network.neutron [-] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.600552] env[68569]: DEBUG nova.compute.manager [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Received event network-changed-ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 769.600745] env[68569]: DEBUG nova.compute.manager [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Refreshing instance network info cache due to event network-changed-ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 769.600934] env[68569]: DEBUG oslo_concurrency.lockutils [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] Acquiring lock "refresh_cache-98efdafe-e02b-46ca-a701-b70042513128" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.624117] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f6653de0-91d5-4598-b9af-2b9f7c3ff244 tempest-ImagesNegativeTestJSON-1070738498 tempest-ImagesNegativeTestJSON-1070738498-project-member] Lock "77b9756e-2299-47e2-a6d8-e8026e33a3de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.698s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.758622] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Releasing lock "refresh_cache-98efdafe-e02b-46ca-a701-b70042513128" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.758998] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Instance network_info: |[{"id": "ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb", "address": "fa:16:3e:2c:42:65", "network": {"id": "d7061ac1-d48d-405d-93c6-033d5a80c4c6", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-722151018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ed048337034227adac0c0a9fa64639", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a5569c57-a27b-4d4d-a519-784be105114f", "external-id": "nsx-vlan-transportzone-640", "segmentation_id": 640, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba6eee62-7d", "ovs_interfaceid": "ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 769.759589] env[68569]: DEBUG oslo_concurrency.lockutils [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] Acquired lock "refresh_cache-98efdafe-e02b-46ca-a701-b70042513128" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.759800] env[68569]: DEBUG nova.network.neutron [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Refreshing network info cache for port ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.761415] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:42:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a5569c57-a27b-4d4d-a519-784be105114f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.769110] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Creating folder: Project (54ed048337034227adac0c0a9fa64639). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.770077] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ba36898-9bf5-4b8c-afd0-7f6e63eca6f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.781596] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Created folder: Project (54ed048337034227adac0c0a9fa64639) in parent group-v633430. [ 769.781783] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Creating folder: Instances. Parent ref: group-v633567. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.782057] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f26c2de8-7473-4984-ae70-a86e29569117 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.792324] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Created folder: Instances in parent group-v633567. [ 769.792959] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.792959] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.793458] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2869f7bf-d1a8-4697-bc58-8314b01fa24c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.817036] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.817036] env[68569]: value = "task-3166914" [ 769.817036] env[68569]: _type = "Task" [ 769.817036] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.826188] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166914, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.826714] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 769.874156] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623587} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.874156] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae/ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.874483] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.874483] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-360b77da-aa23-40f6-9666-0c45524650e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.883646] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 769.883646] env[68569]: value = "task-3166915" [ 769.883646] env[68569]: _type = "Task" [ 769.883646] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.893743] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166915, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.333268] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166914, 'name': CreateVM_Task, 'duration_secs': 0.402649} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.337745] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.338758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.339026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.339276] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.340094] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-542778c3-c2f1-42bf-a387-01041ebf9303 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.345748] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 770.345748] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43de7-4365-a0fb-7ae0-e41b869e2735" [ 770.345748] env[68569]: _type = "Task" [ 770.345748] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.353059] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.359553] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43de7-4365-a0fb-7ae0-e41b869e2735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.397396] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166915, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079809} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.397803] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.398702] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9af54b8-af57-47a3-8785-38dc64272047 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.428873] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae/ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.432630] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97220068-f8aa-45ac-8210-8646b1fac336 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.449653] env[68569]: DEBUG nova.network.neutron [-] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.462352] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 770.462352] env[68569]: value = "task-3166916" [ 770.462352] env[68569]: _type = "Task" [ 770.462352] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.475835] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.724884] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06a6e73-a86d-4f23-85b4-d1187be809d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.736769] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeacf2b1-d67d-4c93-9fdf-adb5d4973934 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.773399] env[68569]: DEBUG nova.network.neutron [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Updated VIF entry in instance network info cache for port ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 770.774050] env[68569]: DEBUG nova.network.neutron [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Updating instance_info_cache with network_info: [{"id": "ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb", "address": "fa:16:3e:2c:42:65", "network": {"id": "d7061ac1-d48d-405d-93c6-033d5a80c4c6", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-722151018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ed048337034227adac0c0a9fa64639", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a5569c57-a27b-4d4d-a519-784be105114f", "external-id": "nsx-vlan-transportzone-640", "segmentation_id": 640, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba6eee62-7d", "ovs_interfaceid": "ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.777256] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d151b9a-705e-4376-a2fa-9bad14c7d261 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.785018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c6acef-bd95-4cfb-951a-b33e251a9d15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.802530] env[68569]: DEBUG nova.compute.provider_tree [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.856116] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43de7-4365-a0fb-7ae0-e41b869e2735, 'name': SearchDatastore_Task, 'duration_secs': 0.019171} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.856492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.856873] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.856873] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.857053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.858289] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.858289] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95e37306-fff1-4a54-aeda-7bf83f4d0cd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.867040] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.867243] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.868306] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0acda73-2262-4430-852e-2811ab9ce3ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.874089] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 770.874089] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5224633e-3fd5-02dc-3e60-26b3aef408ef" [ 770.874089] env[68569]: _type = "Task" [ 770.874089] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.881908] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5224633e-3fd5-02dc-3e60-26b3aef408ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.954712] env[68569]: INFO nova.compute.manager [-] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Took 1.57 seconds to deallocate network for instance. [ 770.975285] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166916, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.277074] env[68569]: DEBUG oslo_concurrency.lockutils [req-c3288096-aec6-41b3-85bf-18b26f066223 req-48a3135f-e9eb-4f4a-8db0-ce9d47550507 service nova] Releasing lock "refresh_cache-98efdafe-e02b-46ca-a701-b70042513128" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.308263] env[68569]: DEBUG nova.scheduler.client.report [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.386463] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5224633e-3fd5-02dc-3e60-26b3aef408ef, 'name': SearchDatastore_Task, 'duration_secs': 0.011709} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.386678] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a5287ce-00e3-4a91-afdd-5428463b1e3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.394539] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 771.394539] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520bd87f-15f4-4b51-7269-1d8f2bd7e3b6" [ 771.394539] env[68569]: _type = "Task" [ 771.394539] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.403915] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520bd87f-15f4-4b51-7269-1d8f2bd7e3b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.467022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.477654] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166916, 'name': ReconfigVM_Task, 'duration_secs': 0.542999} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.477654] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Reconfigured VM instance instance-0000002e to attach disk [datastore1] ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae/ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.477654] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6193ddee-cb90-43e8-b0ec-26337fd70166 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.484384] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 771.484384] env[68569]: value = "task-3166917" [ 771.484384] env[68569]: _type = "Task" [ 771.484384] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.496552] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166917, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.635397] env[68569]: DEBUG nova.compute.manager [req-b03d83aa-c809-4929-b65d-8df7c676bd11 req-dec0ef1f-d4c0-4299-a9d1-3ebf2a437e15 service nova] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Received event network-vif-deleted-192a6986-c831-42da-bce9-c4bde617262e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 771.816233] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.816754] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.824074] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.502s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.824074] env[68569]: INFO nova.compute.claims [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.909515] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520bd87f-15f4-4b51-7269-1d8f2bd7e3b6, 'name': SearchDatastore_Task, 'duration_secs': 0.024831} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.909903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.910291] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 98efdafe-e02b-46ca-a701-b70042513128/98efdafe-e02b-46ca-a701-b70042513128.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.910603] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93dfd7d2-ccae-484e-a083-42426e42f8db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.917439] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 771.917439] env[68569]: value = "task-3166918" [ 771.917439] env[68569]: _type = "Task" [ 771.917439] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.927746] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166918, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.996298] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166917, 'name': Rename_Task, 'duration_secs': 0.276154} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.997323] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.997323] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-155e3a72-3ea1-41cf-b1c0-3fede67d53a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.003736] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 772.003736] env[68569]: value = "task-3166919" [ 772.003736] env[68569]: _type = "Task" [ 772.003736] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.012780] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.323593] env[68569]: DEBUG nova.compute.utils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 772.325946] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.326156] env[68569]: DEBUG nova.network.neutron [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.395014] env[68569]: DEBUG nova.policy [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '842e74e7139540d7a537eb8bd56bca78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e52d937c83d46daa36746494bd7ccbe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 772.432013] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166918, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.517256] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166919, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.830872] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.919543] env[68569]: DEBUG nova.network.neutron [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Successfully created port: 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.935203] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166918, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758209} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.935528] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 98efdafe-e02b-46ca-a701-b70042513128/98efdafe-e02b-46ca-a701-b70042513128.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.935528] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.935969] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b118e210-94c1-4a80-a6ba-ba7a70d39d26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.943112] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 772.943112] env[68569]: value = "task-3166920" [ 772.943112] env[68569]: _type = "Task" [ 772.943112] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.958996] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166920, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.020071] env[68569]: DEBUG oslo_vmware.api [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166919, 'name': PowerOnVM_Task, 'duration_secs': 0.670321} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.020441] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.021171] env[68569]: INFO nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Took 9.88 seconds to spawn the instance on the hypervisor. 
[ 773.021171] env[68569]: DEBUG nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.021791] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c5be9a-baee-443f-9bb2-b9f4e808191b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.458945] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166920, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068182} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.459999] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.460206] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa948f02-3ba5-4d81-8484-11ed37cad9d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.483816] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 98efdafe-e02b-46ca-a701-b70042513128/98efdafe-e02b-46ca-a701-b70042513128.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.486863] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76ec221b-e5f5-46e6-944b-c8ce17e80324 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.507613] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 773.507613] env[68569]: value = "task-3166921" [ 773.507613] env[68569]: _type = "Task" [ 773.507613] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.516646] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166921, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.543099] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1453f8f3-b69d-47e6-82b5-bc1cc3849a07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.549032] env[68569]: INFO nova.compute.manager [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Took 46.70 seconds to build instance. [ 773.553439] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f9e055-678c-4116-bbaf-43ed503a262a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.589033] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ba2c55-4273-401e-9f22-101af21c822c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.597590] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a3fcd1-0685-41bb-b359-3c5e611793bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.611337] env[68569]: DEBUG nova.compute.provider_tree [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 773.840650] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.868091] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.868377] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.869680] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.869680] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.869680] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.869680] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.869680] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.870033] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.870033] 
env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.870033] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.870033] env[68569]: DEBUG nova.virt.hardware [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.870782] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea63d3e5-1621-4702-b81a-da16310e65f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.881507] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a193383-9517-45f1-90b9-7296a7445252 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.021244] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166921, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.056900] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fffcb7ee-7163-4e51-909b-d1de7f785b92 tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.607s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.134634] env[68569]: ERROR nova.scheduler.client.report [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [req-ff08ea22-6df7-4fca-9ab4-b0afedb9344c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ff08ea22-6df7-4fca-9ab4-b0afedb9344c"}]} [ 774.151405] env[68569]: DEBUG nova.scheduler.client.report [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 774.164458] env[68569]: DEBUG nova.scheduler.client.report [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 774.167115] env[68569]: DEBUG nova.compute.provider_tree [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 77 to 78 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 774.167115] env[68569]: DEBUG nova.compute.provider_tree [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 774.184219] env[68569]: DEBUG nova.scheduler.client.report [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 774.215313] env[68569]: DEBUG nova.scheduler.client.report [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 774.520857] env[68569]: DEBUG 
oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166921, 'name': ReconfigVM_Task, 'duration_secs': 0.759012} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.524269] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 98efdafe-e02b-46ca-a701-b70042513128/98efdafe-e02b-46ca-a701-b70042513128.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.525175] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c03e0fb-4409-489d-b656-5a4fa9ee7432 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.533078] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 774.533078] env[68569]: value = "task-3166922" [ 774.533078] env[68569]: _type = "Task" [ 774.533078] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.546508] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166922, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.562112] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 774.696675] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.696977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.767612] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "64146253-16ab-4d95-83c9-31b74014a040" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.767852] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "64146253-16ab-4d95-83c9-31b74014a040" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.768064] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "64146253-16ab-4d95-83c9-31b74014a040-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.769669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "64146253-16ab-4d95-83c9-31b74014a040-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.769669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "64146253-16ab-4d95-83c9-31b74014a040-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.770605] env[68569]: INFO nova.compute.manager [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 
tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Terminating instance [ 774.827763] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246bd2f4-b18e-441e-89ba-8a1b4163cf6a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.838136] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287cbc4e-f9fa-478c-b6cf-033e93cca5a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.875915] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5f4bad-d6d7-411a-b582-faf91df8d7c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.883956] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339e5c2d-c53b-4d3e-8b75-73cd4458cbd8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.901020] env[68569]: DEBUG nova.compute.provider_tree [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.006798] env[68569]: DEBUG nova.compute.manager [req-c93e1145-9c8c-4b8b-a5f4-fb8909dd4fce req-81c31431-ff75-4dfb-9f71-92405b2a3e26 service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Received event network-vif-plugged-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 775.006798] env[68569]: DEBUG oslo_concurrency.lockutils [req-c93e1145-9c8c-4b8b-a5f4-fb8909dd4fce req-81c31431-ff75-4dfb-9f71-92405b2a3e26 service nova] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.006931] env[68569]: DEBUG oslo_concurrency.lockutils [req-c93e1145-9c8c-4b8b-a5f4-fb8909dd4fce req-81c31431-ff75-4dfb-9f71-92405b2a3e26 service nova] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.007060] env[68569]: DEBUG oslo_concurrency.lockutils [req-c93e1145-9c8c-4b8b-a5f4-fb8909dd4fce req-81c31431-ff75-4dfb-9f71-92405b2a3e26 service nova] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.007224] 
env[68569]: DEBUG nova.compute.manager [req-c93e1145-9c8c-4b8b-a5f4-fb8909dd4fce req-81c31431-ff75-4dfb-9f71-92405b2a3e26 service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] No waiting events found dispatching network-vif-plugged-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 775.007480] env[68569]: WARNING nova.compute.manager [req-c93e1145-9c8c-4b8b-a5f4-fb8909dd4fce req-81c31431-ff75-4dfb-9f71-92405b2a3e26 service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Received unexpected event network-vif-plugged-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 for instance with vm_state building and task_state spawning. [ 775.043805] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166922, 'name': Rename_Task, 'duration_secs': 0.147351} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.044096] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.044369] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e62c5c0-bca7-42f7-adb7-8ec9484cced9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.050993] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 775.050993] env[68569]: value = "task-3166923" [ 775.050993] env[68569]: _type = "Task" [ 775.050993] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.059241] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166923, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.083147] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.204612] env[68569]: DEBUG nova.network.neutron [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Successfully updated port: 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.279020] env[68569]: DEBUG nova.compute.manager [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 775.279020] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.279020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33991223-e240-4868-ba3f-a8a1ec76cc3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.286215] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.287595] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-310bcf41-deef-4830-8f20-3327c8bcde80 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.295626] env[68569]: DEBUG oslo_vmware.api [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 775.295626] env[68569]: value = "task-3166924" [ 775.295626] env[68569]: _type = "Task" [ 775.295626] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.309095] env[68569]: DEBUG oslo_vmware.api [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166924, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.444569] env[68569]: DEBUG nova.scheduler.client.report [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 775.444888] env[68569]: DEBUG nova.compute.provider_tree [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 78 to 79 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 775.445115] env[68569]: DEBUG nova.compute.provider_tree [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.560914] env[68569]: DEBUG oslo_vmware.api [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166923, 'name': PowerOnVM_Task, 'duration_secs': 0.468944} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.562676] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.562780] env[68569]: INFO nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Took 9.68 seconds to spawn the instance on the hypervisor. 
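Note on the ERROR at 774.134634: placement rejected the inventory PUT for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with 409 "placement.concurrent_update" because the report client's cached resource provider generation was stale; the client then refreshes inventories, bumps its cached generation from 77 to 78, and the retried update at 775.444569 succeeds (generation 78 to 79). The sketch below shows that refresh-and-retry pattern against the Placement HTTP API under stated assumptions: the endpoint URL, token, microversion header, helper name and retry budget are illustrative, not taken from this deployment or from Nova's report client.

# Sketch only: retry an inventory PUT after a placement.concurrent_update 409
# by re-reading the provider generation, mirroring the refresh seen above.
import requests

PLACEMENT = "http://placement.example/placement"   # hypothetical endpoint
HEADERS = {
    "X-Auth-Token": "ADMIN_TOKEN",                  # hypothetical token
    "OpenStack-API-Version": "placement 1.26",
}

def set_inventory(provider_uuid, inventories, retries=3):
    """PUT inventories, refreshing the provider generation on each 409."""
    url = f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories"
    for _ in range(retries):
        # Re-read the current generation, like the refresh at 774.151405.
        gen = requests.get(
            f"{PLACEMENT}/resource_providers/{provider_uuid}",
            headers=HEADERS).json()["generation"]
        resp = requests.put(url, headers=HEADERS, json={
            "resource_provider_generation": gen,
            "inventories": inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop and retry with a fresh generation.
    raise RuntimeError("gave up after repeated generation conflicts")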
[ 775.562861] env[68569]: DEBUG nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 775.563833] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed64fbb-39ff-4f6d-ac71-518312342411 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.709111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.709111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.709111] env[68569]: DEBUG nova.network.neutron [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.806267] env[68569]: DEBUG oslo_vmware.api [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166924, 'name': PowerOffVM_Task, 'duration_secs': 0.209386} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.806608] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 775.806776] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.807046] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a31f828-8ee4-498f-a166-5818861c1a8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.866919] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.867177] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.867361] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleting the datastore file [datastore2] 64146253-16ab-4d95-83c9-31b74014a040 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.867645] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a3a27cc-c808-42c3-85d6-e0e17edbe40a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.875551] env[68569]: DEBUG oslo_vmware.api [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 775.875551] env[68569]: value = "task-3166926" [ 775.875551] env[68569]: _type = "Task" [ 775.875551] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.884415] env[68569]: DEBUG oslo_vmware.api [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166926, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.951883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.131s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.952715] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 775.956595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.963s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.956892] env[68569]: DEBUG nova.objects.instance [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lazy-loading 'resources' on Instance uuid 2cde3729-1be6-42c5-891f-42a7a8bff267 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 776.085127] env[68569]: INFO nova.compute.manager [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Took 44.40 seconds to build instance. [ 776.272340] env[68569]: DEBUG nova.network.neutron [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.389945] env[68569]: DEBUG oslo_vmware.api [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3166926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169839} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.393761] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.393761] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.393761] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.393761] env[68569]: INFO nova.compute.manager [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Took 1.11 seconds to destroy the instance on the hypervisor. [ 776.393761] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.394317] env[68569]: DEBUG nova.compute.manager [-] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 776.394317] env[68569]: DEBUG nova.network.neutron [-] [instance: 64146253-16ab-4d95-83c9-31b74014a040] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.463424] env[68569]: DEBUG nova.compute.utils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 776.465308] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 776.465488] env[68569]: DEBUG nova.network.neutron [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 776.514299] env[68569]: DEBUG nova.policy [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 776.590826] env[68569]: DEBUG oslo_concurrency.lockutils [None req-cb21ba32-255e-4cab-a7a1-fd0c996d4a5d tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "98efdafe-e02b-46ca-a701-b70042513128" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.596s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.633037] env[68569]: DEBUG nova.network.neutron [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updating instance_info_cache with network_info: [{"id": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "address": "fa:16:3e:a4:0d:69", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c249c42-ad", "ovs_interfaceid": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.866469] env[68569]: DEBUG nova.network.neutron [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Successfully created port: 9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 
{{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.974153] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 777.046030] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4a64fa-6291-4263-b211-b9a14052d2a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.054478] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bd4b81-ae0c-4abc-a764-d59aef6fdba5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.087292] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ecb6a2-70a1-458d-bd94-c158d78df1b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.096653] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dcbd1a-6f3b-48d9-b22b-a7ffffd2f567 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.099732] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.114250] env[68569]: DEBUG nova.compute.provider_tree [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.136075] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.136388] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Instance network_info: |[{"id": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "address": "fa:16:3e:a4:0d:69", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c249c42-ad", "ovs_interfaceid": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 777.137409] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:0d:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c249c42-adbc-4fe1-994f-1f00f6dfd6d1', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.144276] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 
tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Creating folder: Project (9e52d937c83d46daa36746494bd7ccbe). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.145198] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0af6a22-9686-415e-ae4b-2ede5aa1b119 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.156601] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Created folder: Project (9e52d937c83d46daa36746494bd7ccbe) in parent group-v633430. [ 777.156797] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Creating folder: Instances. Parent ref: group-v633570. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.157045] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c144609-6b4d-4a6e-be31-d3a7669a0559 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.167543] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Created folder: Instances in parent group-v633570. [ 777.167767] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.167948] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.168163] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26cbc9cd-2f95-400b-a02b-66fcc339c840 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.186499] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.186499] env[68569]: value = "task-3166929" [ 777.186499] env[68569]: _type = "Task" [ 777.186499] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.193667] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166929, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.312469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "interface-98efdafe-e02b-46ca-a701-b70042513128-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.312799] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "interface-98efdafe-e02b-46ca-a701-b70042513128-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.313155] env[68569]: DEBUG nova.objects.instance [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lazy-loading 'flavor' on Instance uuid 98efdafe-e02b-46ca-a701-b70042513128 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.403908] env[68569]: DEBUG nova.network.neutron [-] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.626697] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.652451] env[68569]: DEBUG nova.scheduler.client.report [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 79 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 777.652451] env[68569]: DEBUG nova.compute.provider_tree [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 79 to 80 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 777.652655] env[68569]: DEBUG nova.compute.provider_tree [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Updating inventory in ProviderTree for provider 
a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.682046] env[68569]: DEBUG nova.compute.manager [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Received event network-changed-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 777.682279] env[68569]: DEBUG nova.compute.manager [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Refreshing instance network info cache due to event network-changed-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 777.682487] env[68569]: DEBUG oslo_concurrency.lockutils [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] Acquiring lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.682645] env[68569]: DEBUG oslo_concurrency.lockutils [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] Acquired lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.682822] env[68569]: DEBUG nova.network.neutron [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Refreshing network info cache for port 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.702666] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166929, 'name': CreateVM_Task, 'duration_secs': 0.433613} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.702894] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.703577] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.703826] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.705219] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.705296] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f86ba3e-cc3a-4d1f-9440-294b87fec560 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.710227] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 777.710227] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5219a2b1-e9c7-1ca3-28f9-def903c65bc8" [ 777.710227] env[68569]: _type = "Task" [ 777.710227] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.718443] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5219a2b1-e9c7-1ca3-28f9-def903c65bc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.817434] env[68569]: DEBUG nova.objects.instance [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lazy-loading 'pci_requests' on Instance uuid 98efdafe-e02b-46ca-a701-b70042513128 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.909361] env[68569]: INFO nova.compute.manager [-] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Took 1.51 seconds to deallocate network for instance. 
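Note on the teardown of instance 64146253-16ab-4d95-83c9-31b74014a040 above (roughly 775.287 through 776.393): the driver powers the VM off, unregisters it, then deletes its datastore directory, polling each returned vSphere task until it completes. A rough sketch of that call sequence through an oslo.vmware session follows; the vCenter host, credentials and the vm_ref/dc_ref/path lookups are placeholders and error handling is omitted, so treat it as an illustration of the pattern rather than the driver's actual code.

# Rough sketch of the destroy path seen above: PowerOffVM_Task, UnregisterVM,
# then FileManager.DeleteDatastoreFile_Task, each task awaited like _poll_task.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    "vc1.example", "user", "secret",             # hypothetical vCenter + creds
    api_retry_count=10, task_poll_interval=0.5)

def destroy_vm(vm_ref, ds_path, dc_ref):
    # Power off and block on the returned task.
    session.wait_for_task(
        session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
    # UnregisterVM returns no task; it just drops the VM from the inventory.
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)
    # Delete the instance directory, e.g. the
    # "[datastore2] 64146253-16ab-4d95-83c9-31b74014a040" path in the log.
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                           file_manager, name=ds_path, datacenter=dc_ref))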
[ 777.984202] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 778.018784] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 778.019037] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.019269] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 778.019365] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.019510] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 778.019651] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 778.019853] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 778.020014] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 
tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 778.020180] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 778.020355] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 778.020538] env[68569]: DEBUG nova.virt.hardware [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 778.021457] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d715f4a7-d51f-4467-987d-5758d97508b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.029237] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffef40cf-c7d4-4f6d-9b6f-b4149d9232e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.159442] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.203s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.161922] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.043s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.164161] env[68569]: INFO nova.compute.claims [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.180852] env[68569]: INFO nova.scheduler.client.report [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Deleted allocations for instance 2cde3729-1be6-42c5-891f-42a7a8bff267 [ 778.225194] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': 
session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5219a2b1-e9c7-1ca3-28f9-def903c65bc8, 'name': SearchDatastore_Task, 'duration_secs': 0.012961} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.225531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.225758] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.226099] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.226282] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.226469] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.229533] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f80a1e5-c541-4ac0-ba61-be3faa900487 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.240351] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.240557] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.241345] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59084ecd-a00c-4079-8c1b-e36f13181264 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.247743] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 778.247743] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bde2b8-ae71-48d9-620c-48f2353a6204" [ 778.247743] env[68569]: _type = "Task" [ 778.247743] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.257451] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bde2b8-ae71-48d9-620c-48f2353a6204, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.320526] env[68569]: DEBUG nova.objects.base [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Object Instance<98efdafe-e02b-46ca-a701-b70042513128> lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 778.320605] env[68569]: DEBUG nova.network.neutron [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.416321] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.476300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0bb7ffe-3def-4e2b-aa74-198f20560fed tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "interface-98efdafe-e02b-46ca-a701-b70042513128-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.163s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.510652] env[68569]: DEBUG nova.network.neutron [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updated VIF entry in instance network info cache for port 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 778.510997] env[68569]: DEBUG nova.network.neutron [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updating instance_info_cache with network_info: [{"id": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "address": "fa:16:3e:a4:0d:69", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c249c42-ad", "ovs_interfaceid": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.634893] env[68569]: DEBUG nova.network.neutron [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Successfully updated port: 9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.689456] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4990030-c4bb-4b2e-ba18-b32de382b9c8 tempest-ServersTestBootFromVolume-1558881023 tempest-ServersTestBootFromVolume-1558881023-project-member] Lock "2cde3729-1be6-42c5-891f-42a7a8bff267" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.189s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.759913] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bde2b8-ae71-48d9-620c-48f2353a6204, 'name': SearchDatastore_Task, 'duration_secs': 0.011818} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.760630] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d3160f5-34cf-4a0d-9f9d-56b7f4638756 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.765959] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 778.765959] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52185118-11ec-c1b6-79ea-9ee164abaa21" [ 778.765959] env[68569]: _type = "Task" [ 778.765959] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.773678] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52185118-11ec-c1b6-79ea-9ee164abaa21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.785333] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "98d5c760-6da3-49e3-af47-20a8054971f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.785579] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "98d5c760-6da3-49e3-af47-20a8054971f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.785779] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "98d5c760-6da3-49e3-af47-20a8054971f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.785953] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "98d5c760-6da3-49e3-af47-20a8054971f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.786130] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "98d5c760-6da3-49e3-af47-20a8054971f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.788034] env[68569]: INFO nova.compute.manager [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Terminating instance [ 779.013504] env[68569]: DEBUG oslo_concurrency.lockutils [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] Releasing lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.013792] env[68569]: DEBUG nova.compute.manager [req-5bd9f757-7526-490c-ba01-4f1812929684 req-7cd8f058-a2ef-42cc-9d44-5f6a246cce3f service nova] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Received event network-vif-deleted-519d4e35-86dc-41ad-877e-d3d8f1145ccb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 779.069666] env[68569]: DEBUG nova.compute.manager [req-bd5a1548-7242-4566-86a9-62c2c7734fb6 req-1754648c-acb0-44f3-b78b-befda631ccb5 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Received event network-vif-plugged-9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 779.069936] env[68569]: DEBUG oslo_concurrency.lockutils [req-bd5a1548-7242-4566-86a9-62c2c7734fb6 req-1754648c-acb0-44f3-b78b-befda631ccb5 service nova] Acquiring lock "492c0fa1-f821-496a-86c2-f7686479a733-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.070156] env[68569]: DEBUG oslo_concurrency.lockutils [req-bd5a1548-7242-4566-86a9-62c2c7734fb6 req-1754648c-acb0-44f3-b78b-befda631ccb5 service nova] Lock "492c0fa1-f821-496a-86c2-f7686479a733-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.070324] env[68569]: DEBUG oslo_concurrency.lockutils [req-bd5a1548-7242-4566-86a9-62c2c7734fb6 req-1754648c-acb0-44f3-b78b-befda631ccb5 service nova] Lock "492c0fa1-f821-496a-86c2-f7686479a733-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.070514] env[68569]: DEBUG nova.compute.manager [req-bd5a1548-7242-4566-86a9-62c2c7734fb6 req-1754648c-acb0-44f3-b78b-befda631ccb5 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] No waiting events found dispatching network-vif-plugged-9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.070741] env[68569]: WARNING nova.compute.manager [req-bd5a1548-7242-4566-86a9-62c2c7734fb6 req-1754648c-acb0-44f3-b78b-befda631ccb5 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Received unexpected event network-vif-plugged-9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 for instance with vm_state building and task_state spawning. 
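Editor's note: the entries above show the per-instance external-event pattern — the service-originated network-vif-plugged event is popped under a "<uuid>-events" lock, and because no waiter had yet been registered for instance 492c0fa1-f821-496a-86c2-f7686479a733 the event is dropped with the "Received unexpected event" warning. Below is a minimal sketch of that bookkeeping, assuming nothing beyond oslo_concurrency.lockutils being a real API; the class and helper names are illustrative and are not Nova's actual implementation.

# Sketch only: per-instance event registry guarded by an "<uuid>-events"
# lock, mirroring the lock names and the WARNING visible in the log above.
import threading

from oslo_concurrency import lockutils


class InstanceEvents:
    def __init__(self):
        # instance uuid -> {event name -> threading.Event}
        self._events = {}

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the code path that will later wait for the event.
        with lockutils.lock(instance_uuid + '-events'):
            waiter = threading.Event()
            self._events.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

    def pop_event(self, instance_uuid, event_name):
        # Corresponds to the pop_instance_event lock acquire/release lines.
        with lockutils.lock(instance_uuid + '-events'):
            return self._events.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # No waiter registered yet: the event is logged and dropped, which
        # is what produces the "Received unexpected event" warning above.
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        waiter.set()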
[ 779.138059] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-492c0fa1-f821-496a-86c2-f7686479a733" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.138059] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-492c0fa1-f821-496a-86c2-f7686479a733" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.138059] env[68569]: DEBUG nova.network.neutron [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.284467] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52185118-11ec-c1b6-79ea-9ee164abaa21, 'name': SearchDatastore_Task, 'duration_secs': 0.010819} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.288067] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.288067] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fd803a5e-8dbd-449e-b45d-1e6410a286e8/fd803a5e-8dbd-449e-b45d-1e6410a286e8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.288067] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e201224e-dda3-4f08-b85f-e9495f7f7530 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.292069] env[68569]: DEBUG nova.compute.manager [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 779.292164] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.293192] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fcb82e-f211-4036-933c-cf659e48e0a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.298322] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 779.298322] env[68569]: value = "task-3166930" [ 779.298322] env[68569]: _type = "Task" [ 779.298322] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.308587] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.309845] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dc1ff92-fe2d-49e5-a3d5-5bb486f520a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.316721] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.324853] env[68569]: DEBUG oslo_vmware.api [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 779.324853] env[68569]: value = "task-3166931" [ 779.324853] env[68569]: _type = "Task" [ 779.324853] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.338337] env[68569]: DEBUG oslo_vmware.api [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166931, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.686883] env[68569]: DEBUG nova.network.neutron [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.690908] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9f096b-5c82-47da-9973-9eabb9750a42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.698667] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668ea7c9-59ee-4558-97de-df4670963201 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.733782] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3f8d42-4d8a-494b-a45e-f00d84644136 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.745362] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ac8f36-71ec-444f-a645-a2db32b808a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.762331] env[68569]: DEBUG nova.compute.provider_tree [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.813350] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166930, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.834369] env[68569]: DEBUG oslo_vmware.api [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166931, 'name': PowerOffVM_Task, 'duration_secs': 0.173612} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.837173] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.837597] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.837669] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80d1e4a5-c229-4adf-b131-5368308d5ed1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.894539] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.894731] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.894908] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Deleting the datastore file [datastore2] 98d5c760-6da3-49e3-af47-20a8054971f3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.895187] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa5a5ae5-93fa-47c2-8b9d-a4c7bb647a5e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.903508] env[68569]: DEBUG oslo_vmware.api [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for the task: (returnval){ [ 779.903508] env[68569]: value = "task-3166933" [ 779.903508] env[68569]: _type = "Task" [ 779.903508] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.913794] env[68569]: DEBUG oslo_vmware.api [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166933, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.991518] env[68569]: DEBUG nova.network.neutron [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Updating instance_info_cache with network_info: [{"id": "9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0", "address": "fa:16:3e:5a:85:85", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e78b86b-e1", "ovs_interfaceid": "9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.266364] env[68569]: DEBUG nova.scheduler.client.report [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.314304] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547632} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.314636] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fd803a5e-8dbd-449e-b45d-1e6410a286e8/fd803a5e-8dbd-449e-b45d-1e6410a286e8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.314880] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.315185] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2377b054-7674-43fd-977f-ab694f0a9d94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.324061] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 780.324061] env[68569]: value = "task-3166934" [ 780.324061] env[68569]: _type = "Task" [ 780.324061] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.337901] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166934, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.413036] env[68569]: DEBUG oslo_vmware.api [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Task: {'id': task-3166933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178038} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.414735] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.414943] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.415132] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.415302] env[68569]: INFO nova.compute.manager [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 780.415575] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.416156] env[68569]: DEBUG nova.compute.manager [-] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 780.416254] env[68569]: DEBUG nova.network.neutron [-] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.494091] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-492c0fa1-f821-496a-86c2-f7686479a733" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.494417] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Instance network_info: |[{"id": "9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0", "address": "fa:16:3e:5a:85:85", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e78b86b-e1", "ovs_interfaceid": "9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 780.496214] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:85:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.507346] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.507602] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.509192] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4e086b9-fa2c-4b59-bf00-7f3d45edd9ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.529908] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.529908] env[68569]: value = "task-3166935" [ 780.529908] env[68569]: _type = "Task" [ 780.529908] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.538286] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166935, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.777548] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.778083] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.780939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.712s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.781363] env[68569]: DEBUG nova.objects.instance [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lazy-loading 'resources' on Instance uuid 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.784298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "98efdafe-e02b-46ca-a701-b70042513128" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.784691] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "98efdafe-e02b-46ca-a701-b70042513128" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.784896] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "98efdafe-e02b-46ca-a701-b70042513128-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.785033] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "98efdafe-e02b-46ca-a701-b70042513128-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.785455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "98efdafe-e02b-46ca-a701-b70042513128-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.787787] env[68569]: INFO nova.compute.manager [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Terminating instance [ 780.838694] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 
tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166934, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072868} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.839072] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.840340] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e2324a-20f5-453f-ae51-9bb60c9508ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.867497] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] fd803a5e-8dbd-449e-b45d-1e6410a286e8/fd803a5e-8dbd-449e-b45d-1e6410a286e8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.868645] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff56eaaf-03de-484e-9a5a-2976101ad4b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.890489] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 780.890489] env[68569]: value = "task-3166936" [ 780.890489] env[68569]: _type = "Task" [ 780.890489] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.903042] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166936, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.047624] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166935, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.121371] env[68569]: DEBUG nova.compute.manager [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Received event network-changed-9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 781.121371] env[68569]: DEBUG nova.compute.manager [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Refreshing instance network info cache due to event network-changed-9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 781.121371] env[68569]: DEBUG oslo_concurrency.lockutils [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] Acquiring lock "refresh_cache-492c0fa1-f821-496a-86c2-f7686479a733" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.121371] env[68569]: DEBUG oslo_concurrency.lockutils [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] Acquired lock "refresh_cache-492c0fa1-f821-496a-86c2-f7686479a733" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.121371] env[68569]: DEBUG nova.network.neutron [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Refreshing network info cache for port 9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.288309] env[68569]: DEBUG nova.compute.utils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.290193] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.291060] env[68569]: DEBUG nova.network.neutron [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.297018] env[68569]: DEBUG nova.compute.manager [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 781.297018] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.297018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044f1bdf-abdf-406a-acb6-76a2aa5a4b6f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.317157] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.317622] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd951c0d-8bea-4e9d-b458-824b26152e5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.329743] env[68569]: DEBUG oslo_vmware.api [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 781.329743] env[68569]: value = "task-3166937" [ 781.329743] env[68569]: _type = "Task" [ 781.329743] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.343035] env[68569]: DEBUG oslo_vmware.api [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.349476] env[68569]: DEBUG nova.policy [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715937edaba643339195f77bb00fe05d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '062b8ddb5f0d46d08425a66db32471be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.404537] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166936, 'name': ReconfigVM_Task, 'duration_secs': 0.493774} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.404537] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Reconfigured VM instance instance-00000030 to attach disk [datastore2] fd803a5e-8dbd-449e-b45d-1e6410a286e8/fd803a5e-8dbd-449e-b45d-1e6410a286e8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.405036] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afb4d0e6-7ef4-421f-8f31-9915efc451ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.412085] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 781.412085] env[68569]: value = "task-3166938" [ 781.412085] env[68569]: _type = "Task" [ 781.412085] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.421041] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166938, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.544780] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166935, 'name': CreateVM_Task, 'duration_secs': 0.851888} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.547236] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 781.552501] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.552693] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.553016] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 781.553310] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0786713-8e0d-4c57-a70d-8201ce443990 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.558229] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 781.558229] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52193bd1-ae12-d80d-475a-a0a0bdc9cdea" [ 781.558229] env[68569]: _type = "Task" [ 781.558229] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.573558] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52193bd1-ae12-d80d-475a-a0a0bdc9cdea, 'name': SearchDatastore_Task, 'duration_secs': 0.009902} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.573813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.574054] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.574406] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.574550] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.575235] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.575235] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a5fed21-035a-40b3-b335-86b0913f4dbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.585372] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.585562] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.586309] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-986c908b-91e5-4b4e-83fe-93f7ed0b0753 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.591918] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 781.591918] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52940a7c-51ed-2977-a161-45e5e416edec" [ 781.591918] env[68569]: _type = "Task" [ 781.591918] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.604222] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52940a7c-51ed-2977-a161-45e5e416edec, 'name': SearchDatastore_Task, 'duration_secs': 0.008744} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.604666] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69285329-3830-4712-8ae7-a0608db14238 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.609697] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 781.609697] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52095b24-fa0e-d13e-d310-436cff62a349" [ 781.609697] env[68569]: _type = "Task" [ 781.609697] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.622933] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52095b24-fa0e-d13e-d310-436cff62a349, 'name': SearchDatastore_Task, 'duration_secs': 0.00919} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.623251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.623772] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 492c0fa1-f821-496a-86c2-f7686479a733/492c0fa1-f821-496a-86c2-f7686479a733.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 781.626318] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d6d73bb-fa16-47a0-bde8-16b5bee77c96 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.629235] env[68569]: DEBUG nova.network.neutron [-] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.635806] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 781.635806] env[68569]: value = "task-3166939" [ 781.635806] env[68569]: _type = "Task" [ 781.635806] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.645133] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.793803] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.846885] env[68569]: DEBUG oslo_vmware.api [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166937, 'name': PowerOffVM_Task, 'duration_secs': 0.214226} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.846885] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 781.846885] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 781.846885] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f6adf31-ccc3-4203-9625-270dd620cc41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.922233] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 781.922463] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 781.922647] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Deleting the datastore file [datastore1] 98efdafe-e02b-46ca-a701-b70042513128 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.923280] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73f2fa85-7ebd-44d7-8981-ee9f502621f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.930273] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166938, 'name': Rename_Task, 'duration_secs': 0.148756} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.931102] env[68569]: DEBUG nova.network.neutron [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Successfully created port: aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.933415] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.933631] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f13d7b42-4c48-40a4-a93e-4a30d3d146e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.937376] env[68569]: DEBUG oslo_vmware.api [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for the task: (returnval){ [ 781.937376] env[68569]: value = "task-3166941" [ 781.937376] env[68569]: _type = "Task" [ 781.937376] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.942625] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 781.942625] env[68569]: value = "task-3166942" [ 781.942625] env[68569]: _type = "Task" [ 781.942625] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.952089] env[68569]: DEBUG oslo_vmware.api [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.961091] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166942, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.983992] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a038f8-b9cb-40bc-bc3a-bb15c229a69c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.995200] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba02c0ca-1e69-4aaa-892f-decc715fbc7f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.038691] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ee3f3c-949e-4cb3-bf06-120be1eda547 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.048992] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9192b33-7f9b-4dc2-b5ac-06c165e24084 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.070324] env[68569]: DEBUG nova.compute.provider_tree [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.131309] env[68569]: INFO nova.compute.manager [-] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Took 1.71 seconds to deallocate network for instance. [ 782.146351] env[68569]: DEBUG nova.network.neutron [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Updated VIF entry in instance network info cache for port 9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 782.146832] env[68569]: DEBUG nova.network.neutron [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Updating instance_info_cache with network_info: [{"id": "9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0", "address": "fa:16:3e:5a:85:85", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e78b86b-e1", "ovs_interfaceid": "9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.153981] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499656} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.154233] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 492c0fa1-f821-496a-86c2-f7686479a733/492c0fa1-f821-496a-86c2-f7686479a733.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.154464] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.154666] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfda7fae-678c-45e3-ab4e-d8b8203f5db0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.160875] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 782.160875] env[68569]: value = "task-3166943" [ 782.160875] env[68569]: _type = "Task" [ 782.160875] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.172789] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.462630] env[68569]: DEBUG oslo_vmware.api [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Task: {'id': task-3166941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280963} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.462931] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166942, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.463134] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.463317] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 782.463484] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 782.464090] env[68569]: INFO nova.compute.manager [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Took 1.17 seconds to destroy the instance on the hypervisor. [ 782.464090] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 782.464193] env[68569]: DEBUG nova.compute.manager [-] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 782.464229] env[68569]: DEBUG nova.network.neutron [-] [instance: 98efdafe-e02b-46ca-a701-b70042513128] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 782.576460] env[68569]: DEBUG nova.scheduler.client.report [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.647560] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.649556] env[68569]: DEBUG oslo_concurrency.lockutils [req-039570e2-86b8-497b-9520-151a23749b4b req-9e17b3e3-b2d2-4a7a-a454-9f748460481d service nova] Releasing lock "refresh_cache-492c0fa1-f821-496a-86c2-f7686479a733" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.670873] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.807651] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.833365] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.833589] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.833769] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.833953] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.834719] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.834914] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.835137] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 782.835300] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.835468] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.835626] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.835793] env[68569]: DEBUG nova.virt.hardware [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.837206] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581bb6ed-7479-45e4-b3fd-5970583781e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.844745] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b655ad6-e5d3-48aa-8ad1-a10e253c6b52 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.959088] env[68569]: DEBUG oslo_vmware.api [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3166942, 'name': PowerOnVM_Task, 'duration_secs': 0.730726} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.959088] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.961212] env[68569]: INFO nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Took 9.12 seconds to spawn the instance on the hypervisor. 
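The entries around this point show each vCenter operation in the spawn path (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) being driven asynchronously: wait_for_task submits the call, then _poll_task lines report "progress is N%" until the task completes. Below is a minimal, self-contained sketch of that polling pattern only; it is not the nova/oslo.vmware implementation, and FakeTask plus the poll interval are hypothetical stand-ins for a real vCenter task handle.

```python
# Sketch of the wait_for_task / _poll_task pattern seen in the log above.
# FakeTask is a hypothetical stand-in for a vCenter task reference
# (e.g. task-3166946); a real client would re-read TaskInfo via the
# PropertyCollector instead of faking progress.
import time


class FakeTask:
    """Stands in for a vCenter task handle."""

    def __init__(self, name):
        self.name = name
        self._progress = 0

    def refresh(self):
        # Pretend the task advances on every poll.
        self._progress = min(self._progress + 33, 100)
        return {
            "name": self.name,
            "progress": self._progress,
            "state": "success" if self._progress >= 100 else "running",
        }


def wait_for_task(task, poll_interval=0.5):
    """Poll until the task reaches a terminal state, logging progress."""
    while True:
        info = task.refresh()
        print(f"Task {info['name']} progress is {info['progress']}%")
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['name']} failed")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))
```

In the actual service the polling interval and retry behaviour come from the oslo.vmware session configuration rather than a hand-rolled loop; the sketch only mirrors the observable progress-logging behaviour recorded here.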
[ 782.961313] env[68569]: DEBUG nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.962442] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5403c9aa-e30e-47e1-be3c-566d6c32b415 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.085041] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.304s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.089403] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.313s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.089671] env[68569]: DEBUG nova.objects.instance [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lazy-loading 'resources' on Instance uuid 16b6fafe-524d-482f-961b-10e3601ac4c2 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.120627] env[68569]: INFO nova.scheduler.client.report [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Deleted allocations for instance 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155 [ 783.176396] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.60603} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.179870] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.179870] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253a2977-591c-4c03-adff-91be642977c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.197185] env[68569]: DEBUG nova.compute.manager [req-f9c98948-50d0-4a6c-a551-cc9783b0024f req-70b8b0ff-56bd-4643-a148-53217534feaf service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Received event network-vif-deleted-ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 783.197394] env[68569]: INFO nova.compute.manager [req-f9c98948-50d0-4a6c-a551-cc9783b0024f req-70b8b0ff-56bd-4643-a148-53217534feaf service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Neutron deleted interface ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb; detaching it from the instance and deleting it from the info cache [ 783.197619] env[68569]: DEBUG nova.network.neutron [req-f9c98948-50d0-4a6c-a551-cc9783b0024f req-70b8b0ff-56bd-4643-a148-53217534feaf service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.207583] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 492c0fa1-f821-496a-86c2-f7686479a733/492c0fa1-f821-496a-86c2-f7686479a733.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.208726] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fb1e9f0-64f2-4cee-8284-8519a119d3b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.230391] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 783.230391] env[68569]: value = "task-3166944" [ 783.230391] env[68569]: _type = "Task" [ 783.230391] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.241207] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166944, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.275291] env[68569]: DEBUG nova.network.neutron [-] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.381268] env[68569]: DEBUG nova.compute.manager [req-ee745ff0-9bed-401c-8b7d-527aeac87dd2 req-d71bf168-d3fe-4722-ad59-1f15af8dfa01 service nova] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Received event network-vif-deleted-bad77068-318c-4c21-8d0f-74d1c5d8da7b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 783.484886] env[68569]: INFO nova.compute.manager [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Took 47.03 seconds to build instance. [ 783.601455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "a63b06a1-c24e-4013-a1f4-b227732a1e05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.601830] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.631070] env[68569]: DEBUG oslo_concurrency.lockutils [None req-322cf3ba-6f71-4be5-ae10-2adcd5684f43 tempest-SecurityGroupsTestJSON-456876085 tempest-SecurityGroupsTestJSON-456876085-project-member] Lock "5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.017s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.709681] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce78a52b-553f-46a2-9d58-fcff66555310 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.721683] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72b1f3e-211a-4b22-a6e2-6db28e7934e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.747746] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166944, 'name': ReconfigVM_Task, 'duration_secs': 0.277419} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.759276] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 492c0fa1-f821-496a-86c2-f7686479a733/492c0fa1-f821-496a-86c2-f7686479a733.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 783.759276] env[68569]: DEBUG nova.compute.manager [req-f9c98948-50d0-4a6c-a551-cc9783b0024f req-70b8b0ff-56bd-4643-a148-53217534feaf service nova] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Detach interface failed, port_id=ba6eee62-7dee-4ec2-a824-fe2c5b16a1bb, reason: Instance 98efdafe-e02b-46ca-a701-b70042513128 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 783.761875] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48e325d5-9852-4b2f-bc6b-338aaf8bc227 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.769223] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 783.769223] env[68569]: value = "task-3166945" [ 783.769223] env[68569]: _type = "Task" [ 783.769223] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.780545] env[68569]: INFO nova.compute.manager [-] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Took 1.32 seconds to deallocate network for instance. [ 783.780890] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166945, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.877034] env[68569]: DEBUG nova.network.neutron [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Successfully updated port: aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.989115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bfbd0898-a637-4d22-bf11-3d162290dd43 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.968s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.141403] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd08ecbc-17dd-40c8-889a-533f63a62f7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.153156] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4311b0b0-9786-42a8-a277-c4a9e37822b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.190238] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95760c34-5ab1-4418-8275-64495baa043b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.202063] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf79fd5-c2bd-4739-a964-31d2de2a910e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.218230] env[68569]: DEBUG nova.compute.provider_tree [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.284755] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166945, 'name': Rename_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.287867] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.381861] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "refresh_cache-1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.382038] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "refresh_cache-1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.382198] env[68569]: DEBUG nova.network.neutron [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.490850] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 784.716436] env[68569]: DEBUG nova.scheduler.client.report [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.791888] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166945, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.940302] env[68569]: DEBUG nova.network.neutron [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.015279] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.215177] env[68569]: DEBUG nova.network.neutron [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Updating instance_info_cache with network_info: [{"id": "aa644f82-a73f-492b-9432-6fce0cc9def1", "address": "fa:16:3e:5b:27:1c", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa644f82-a7", "ovs_interfaceid": "aa644f82-a73f-492b-9432-6fce0cc9def1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.224506] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.135s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.229867] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.019s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.229867] env[68569]: DEBUG nova.objects.instance [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lazy-loading 'resources' on Instance uuid c56e4282-b1ca-42f5-b346-692779475df0 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.261140] env[68569]: INFO nova.scheduler.client.report [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Deleted allocations for 
instance 16b6fafe-524d-482f-961b-10e3601ac4c2 [ 785.287339] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166945, 'name': Rename_Task, 'duration_secs': 1.144306} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.287921] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.288378] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-adf7544c-60ed-4c31-94fa-ad99ea99e306 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.298664] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 785.298664] env[68569]: value = "task-3166946" [ 785.298664] env[68569]: _type = "Task" [ 785.298664] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.307645] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166946, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.421710] env[68569]: DEBUG nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Received event network-vif-plugged-aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 785.421710] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Acquiring lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.421710] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.421710] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.421710] env[68569]: DEBUG nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] No waiting events found dispatching network-vif-plugged-aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 785.422381] env[68569]: WARNING nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Received unexpected event network-vif-plugged-aa644f82-a73f-492b-9432-6fce0cc9def1 for instance with vm_state building and task_state spawning. [ 785.422381] env[68569]: DEBUG nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Received event network-changed-aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 785.422381] env[68569]: DEBUG nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Refreshing instance network info cache due to event network-changed-aa644f82-a73f-492b-9432-6fce0cc9def1. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 785.422381] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Acquiring lock "refresh_cache-1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.721486] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "refresh_cache-1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.725109] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Instance network_info: |[{"id": "aa644f82-a73f-492b-9432-6fce0cc9def1", "address": "fa:16:3e:5b:27:1c", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa644f82-a7", "ovs_interfaceid": "aa644f82-a73f-492b-9432-6fce0cc9def1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 785.725109] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Acquired lock "refresh_cache-1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.725508] env[68569]: DEBUG nova.network.neutron [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Refreshing network info cache for port aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.725508] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:27:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f52a458-d157-48a3-b4e2-b8cc0779afe2', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'aa644f82-a73f-492b-9432-6fce0cc9def1', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.737065] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.742930] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.743451] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf14b2f4-8b75-4d0a-8abf-d0245af7ce20 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.771464] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3a0adaf3-1f5f-4811-a3e3-4f6f10aab79b tempest-ServerExternalEventsTest-149397534 tempest-ServerExternalEventsTest-149397534-project-member] Lock "16b6fafe-524d-482f-961b-10e3601ac4c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.580s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.773031] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.773031] env[68569]: value = "task-3166947" [ 785.773031] env[68569]: _type = "Task" [ 785.773031] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.785968] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166947, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.812695] env[68569]: DEBUG oslo_vmware.api [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166946, 'name': PowerOnVM_Task, 'duration_secs': 0.483503} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.812695] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.812877] env[68569]: INFO nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Took 7.83 seconds to spawn the instance on the hypervisor. 
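Aside for readers following the driver flow (this note and the sketch below are editorial additions, not lines from the captured log): the spawn sequence recorded above for these instances — CreateVM_Task, Rename_Task, PowerOnVM_Task, each followed by "Waiting for the task" and periodic "progress is N%" polling until "completed successfully" — is the standard oslo.vmware invoke-then-poll pattern. A minimal sketch of that pattern, assuming "session" is an already connected oslo_vmware.api.VMwareAPISession and "vm_ref" is a VM managed object reference (both are placeholders here, not values taken from this log):

    from oslo_vmware import vim_util

    def power_on_and_check(session, vm_ref):
        # Start the asynchronous vSphere task; this is the call the log shows as
        # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Poll the task until it succeeds (or raise on error); the polling is what
        # produces the "Task: {'id': ..., 'name': PowerOnVM_Task} progress is N%"
        # entries and the final 'duration_secs' value.
        session.wait_for_task(task)
        # Property reads go through PropertyCollector.RetrievePropertiesEx, as in
        # the power-state check the compute manager performs right after power-on.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')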
[ 785.813013] env[68569]: DEBUG nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.813729] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff3477e-6d0e-41fe-bfe1-05daf5b017f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.263025] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7fe570-6849-4a0b-be06-fc81d8d1ac7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.274077] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5481f4c-afe5-4450-98f7-5473a532bccf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.286683] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166947, 'name': CreateVM_Task, 'duration_secs': 0.321904} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.314735] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.316028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.316141] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.316431] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 786.317232] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a413912-f2a1-4b13-9137-fe447194bfbd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.319939] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72100368-bfca-4ce2-b1e3-b40a74b8c28e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.325890] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c 
tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 786.325890] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524021b3-c2bd-d109-910a-9b83e39d044b" [ 786.325890] env[68569]: _type = "Task" [ 786.325890] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.336463] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ebe7f5-bde9-45b5-b9b3-c8ae5d3c612d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.346899] env[68569]: INFO nova.compute.manager [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Took 48.05 seconds to build instance. [ 786.361084] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524021b3-c2bd-d109-910a-9b83e39d044b, 'name': SearchDatastore_Task, 'duration_secs': 0.012459} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.361642] env[68569]: DEBUG nova.compute.provider_tree [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.363761] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.363978] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.364280] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.364479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" 
{{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.364619] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.365441] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d7e3390-0195-427e-99c3-03c05d565263 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.376559] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.376926] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.379354] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d787f6c-2c69-41f2-8e1a-a0f9da0ef8fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.383887] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 786.383887] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527f58b9-a08b-4e50-64a0-101a02001d6e" [ 786.383887] env[68569]: _type = "Task" [ 786.383887] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.391670] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527f58b9-a08b-4e50-64a0-101a02001d6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.542372] env[68569]: DEBUG nova.network.neutron [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Updated VIF entry in instance network info cache for port aa644f82-a73f-492b-9432-6fce0cc9def1. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.542737] env[68569]: DEBUG nova.network.neutron [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Updating instance_info_cache with network_info: [{"id": "aa644f82-a73f-492b-9432-6fce0cc9def1", "address": "fa:16:3e:5b:27:1c", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa644f82-a7", "ovs_interfaceid": "aa644f82-a73f-492b-9432-6fce0cc9def1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.863696] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b7900d1b-e5f2-4523-aa23-6aa2e4e67e82 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.813s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.865962] env[68569]: DEBUG nova.scheduler.client.report [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.896305] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527f58b9-a08b-4e50-64a0-101a02001d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.008532} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.897134] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b110202f-d316-4a86-944d-e8414e7483e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.902445] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 786.902445] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52183cf0-34e6-35ef-d362-84f9553e0f0e" [ 786.902445] env[68569]: _type = "Task" [ 786.902445] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.910138] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52183cf0-34e6-35ef-d362-84f9553e0f0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.045807] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Releasing lock "refresh_cache-1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.046325] env[68569]: DEBUG nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Received event network-changed-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 787.046325] env[68569]: DEBUG nova.compute.manager [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Refreshing instance network info cache due to event network-changed-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 787.046495] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Acquiring lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.046607] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Acquired lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.046769] env[68569]: DEBUG nova.network.neutron [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Refreshing network info cache for port 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.366799] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.370021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.375177] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.842s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.375416] env[68569]: DEBUG nova.objects.instance [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lazy-loading 'resources' on Instance uuid 0c4d4d93-89bf-4164-973b-af48278a3915 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.416164] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52183cf0-34e6-35ef-d362-84f9553e0f0e, 'name': SearchDatastore_Task, 'duration_secs': 0.009999} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.419165] env[68569]: INFO nova.scheduler.client.report [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Deleted allocations for instance c56e4282-b1ca-42f5-b346-692779475df0 [ 787.419165] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.419165] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6/1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.419165] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17ff3986-2477-46db-a99d-f76f19d1f5cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.429834] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 787.429834] env[68569]: value = "task-3166948" [ 787.429834] env[68569]: _type = "Task" [ 787.429834] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.444410] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.871514] env[68569]: DEBUG nova.network.neutron [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updated VIF entry in instance network info cache for port 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.872165] env[68569]: DEBUG nova.network.neutron [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updating instance_info_cache with network_info: [{"id": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "address": "fa:16:3e:a4:0d:69", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c249c42-ad", "ovs_interfaceid": "8c249c42-adbc-4fe1-994f-1f00f6dfd6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.898789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.927942] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac608da5-5af6-4ae0-90b7-355505e98da6 tempest-ListImageFiltersTestJSON-218439547 tempest-ListImageFiltersTestJSON-218439547-project-member] Lock "c56e4282-b1ca-42f5-b346-692779475df0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.250s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.944826] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4962} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.945846] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6/1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.945846] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.945846] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ea104e4-0f03-4e2b-a08a-56a5931dcd7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.952113] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 787.952113] env[68569]: value = "task-3166949" [ 787.952113] env[68569]: _type = "Task" [ 787.952113] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.963795] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.378931] env[68569]: DEBUG oslo_concurrency.lockutils [req-44cc40f5-685e-4ffb-987a-33263dc0949e req-3d88fc4f-ee46-41bd-91ab-9cae822fcbbd service nova] Releasing lock "refresh_cache-fd803a5e-8dbd-449e-b45d-1e6410a286e8" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.383976] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "492c0fa1-f821-496a-86c2-f7686479a733" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.384231] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.415224] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a614a4ea-aecf-4599-b905-25286e5823a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.422214] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2367afb4-31ea-4867-a080-b2aa45aaec94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.461646] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ed7358-45b7-49cd-829e-daf0fe485926 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.469965] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082931} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.472380] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.473144] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1576e4fd-0583-4d97-b08c-39c773aca203 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.476624] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d189c1-2fe7-4add-bc5a-dd92f5ea7ffe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.505308] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6/1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.513696] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d96599e-6fe6-46c5-9103-1d3e30e58393 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.528669] env[68569]: DEBUG nova.compute.provider_tree [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.536646] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 788.536646] env[68569]: value = "task-3166950" [ 788.536646] env[68569]: _type = "Task" [ 788.536646] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.545563] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166950, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.891541] env[68569]: DEBUG nova.compute.utils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 789.057705] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.067116] env[68569]: ERROR nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [req-1f4e7f8c-8097-4428-b6df-d09a28db5ac8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1f4e7f8c-8097-4428-b6df-d09a28db5ac8"}]} [ 789.083049] env[68569]: DEBUG nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 789.110734] env[68569]: DEBUG nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 789.110870] env[68569]: DEBUG nova.compute.provider_tree [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 789.123970] env[68569]: DEBUG nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 789.149017] env[68569]: DEBUG nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 789.394403] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.550070] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166950, 'name': ReconfigVM_Task, 'duration_secs': 0.86952} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.550370] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6/1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.551080] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-311afad8-5357-4998-a5b2-c14199c22a9e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.561129] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 789.561129] env[68569]: value = "task-3166951" [ 789.561129] env[68569]: _type = "Task" [ 789.561129] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.569520] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166951, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.756576] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb95d55-1110-465a-8f23-162ab1f6980f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.764312] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771b03ff-5d7f-438c-ba1b-f066b7a275f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.794204] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0a3041-b47e-41dc-8782-95cde4d4634e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.801424] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d716a769-5853-49ab-9639-1efa2419b8b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.814992] env[68569]: DEBUG nova.compute.provider_tree [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.072966] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166951, 'name': Rename_Task, 'duration_secs': 0.353217} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.074132] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.074132] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-051d5e07-972b-4361-9abb-4bce93321a18 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.081135] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 790.081135] env[68569]: value = "task-3166952" [ 790.081135] env[68569]: _type = "Task" [ 790.081135] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.092816] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.364664] env[68569]: DEBUG nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 790.364898] env[68569]: DEBUG nova.compute.provider_tree [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 81 to 82 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 790.365205] env[68569]: DEBUG nova.compute.provider_tree [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.485923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "492c0fa1-f821-496a-86c2-f7686479a733" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.488646] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.488646] env[68569]: INFO nova.compute.manager [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 
492c0fa1-f821-496a-86c2-f7686479a733] Attaching volume fe9cc839-3100-4968-9dac-3e4881da0f59 to /dev/sdb [ 790.532029] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048dcece-d620-4c09-a567-522eca149a76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.537846] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1537f2-71e9-4ae5-b03b-7bbb8199be9f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.553247] env[68569]: DEBUG nova.virt.block_device [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Updating existing volume attachment record: 1ecc0f81-27aa-4e76-ae90-a23bd7dd2850 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 790.592561] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166952, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.871809] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.497s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.875242] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.210s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.875242] env[68569]: DEBUG nova.objects.instance [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lazy-loading 'resources' on Instance uuid 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.914236] env[68569]: INFO nova.scheduler.client.report [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleted allocations for instance 0c4d4d93-89bf-4164-973b-af48278a3915 [ 791.094739] env[68569]: DEBUG oslo_vmware.api [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166952, 'name': PowerOnVM_Task, 'duration_secs': 0.865315} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.095238] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.095422] env[68569]: INFO nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Took 8.29 seconds to spawn the instance on the hypervisor. [ 791.095611] env[68569]: DEBUG nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.096452] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765ea4f5-afc1-4a7e-a835-9bef11e77328 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.438130] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0e0ef85-8254-403c-bdd6-0d951c20b31d tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "0c4d4d93-89bf-4164-973b-af48278a3915" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.308s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.621951] env[68569]: INFO nova.compute.manager [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Took 47.52 seconds to build instance. 
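The PowerOnVM_Task entries above come from polling the vCenter task object until it reaches a terminal state, logging the reported progress each pass (0% ... 87% ... completed successfully, with a duration_secs on completion). Below is a minimal, hypothetical sketch of that polling pattern; wait_for_vc_task, poll_task and the simulated task are illustrative stand-ins, not oslo.vmware's actual wait_for_task implementation.

import time

def wait_for_vc_task(poll_task, interval=0.5, timeout=300.0):
    # poll_task() must return a (state, progress, error) tuple, where state
    # is one of 'queued', 'running', 'success', or 'error'. This mirrors the
    # progress lines seen in the log excerpt above.
    deadline = time.monotonic() + timeout
    start = time.monotonic()
    while time.monotonic() < deadline:
        state, progress, error = poll_task()
        print(f"Task progress is {progress}%.")
        if state == 'success':
            print(f"duration_secs: {time.monotonic() - start:.6f}")
            return
        if state == 'error':
            raise RuntimeError(f"task failed: {error}")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

# Illustrative usage: a simulated task that finishes on the third poll.
_states = iter([('running', 0, None), ('running', 87, None), ('success', 100, None)])
wait_for_vc_task(lambda: next(_states), interval=0.0)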
[ 791.960720] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f46e64-23fc-45c9-ac97-7509136bad29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.969699] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a81f45-f66d-4f4a-a273-180e613e1f58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.007533] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947cdd01-6cfa-4a18-88ed-b114cf1c6ce4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.021651] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26d2ca0-2cc6-403a-baf5-b6c396870e6e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.037621] env[68569]: DEBUG nova.compute.provider_tree [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.124218] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04bb4446-c042-4bce-af8b-980471dd765c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.023s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.540617] env[68569]: DEBUG nova.scheduler.client.report [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.627286] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.658935] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.659943] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.046815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.172s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.051034] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.638s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.052423] env[68569]: INFO nova.compute.claims [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.073595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.073866] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.074090] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.074340] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.074427] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.079277] env[68569]: INFO nova.compute.manager [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Terminating instance [ 793.087661] env[68569]: INFO nova.scheduler.client.report [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Deleted allocations for instance 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28 [ 793.164697] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.389589] env[68569]: DEBUG nova.compute.manager [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.390536] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a57d498-a613-4dfc-8976-5c501eb57607 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.494671] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "f84204a9-aeea-498e-9682-298e581b34e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.495105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "f84204a9-aeea-498e-9682-298e581b34e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.587670] env[68569]: DEBUG nova.compute.manager [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 793.587907] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.588971] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a436f5d-db4b-41a4-ae7e-ab88a298a0bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.604040] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.604430] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9399947d-fdaf-4eac-9786-e925235090ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.607734] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f4d6d31-ae84-42cf-b502-85912f07192d tempest-VolumesAssistedSnapshotsTest-1928946124 tempest-VolumesAssistedSnapshotsTest-1928946124-project-member] Lock "50b9775c-ddbd-4e8f-a2b8-b08c3028fc28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.663s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.616637] env[68569]: DEBUG oslo_vmware.api [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 793.616637] env[68569]: value = "task-3166957" [ 793.616637] env[68569]: _type = "Task" [ 793.616637] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.629817] env[68569]: DEBUG oslo_vmware.api [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166957, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.816943] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.817515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.911582] env[68569]: INFO nova.compute.manager [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] instance snapshotting [ 793.916481] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd504467-aef6-4e81-88c1-4f58d703c6d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.937208] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79b11ff-233c-491e-abcf-8c060d129594 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.129897] env[68569]: DEBUG oslo_vmware.api [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166957, 'name': PowerOffVM_Task, 'duration_secs': 0.198862} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.133724] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.133724] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.133724] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d1ab3b1-9d7e-46b3-b7b5-d67d6b650f3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.206326] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.206326] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.206326] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleting the datastore file [datastore1] eec09a1c-e8b2-4b6a-9545-e190e1f965d1 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.206326] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7f26a3d-cdea-4860-9c11-8f11b1070158 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.215436] env[68569]: DEBUG oslo_vmware.api [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 794.215436] env[68569]: value = "task-3166959" [ 794.215436] env[68569]: _type = "Task" [ 794.215436] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.228511] env[68569]: DEBUG oslo_vmware.api [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.450368] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 794.450368] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f919ad9c-a9e2-4a45-838f-c68cac0e04de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.461018] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 794.461018] env[68569]: value = "task-3166960" [ 794.461018] env[68569]: _type = "Task" [ 794.461018] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.473959] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166960, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.627596] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62ee428-ae51-451d-81e2-1c28ce5ace71 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.636282] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d80048c-cba3-4423-956e-c12e9918d5ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.672341] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f102a686-914f-4b89-a43b-ba50ecd6e183 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.680490] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7776d1a-d811-4c62-a4fc-4f74079cb707 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.696088] env[68569]: DEBUG nova.compute.provider_tree [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.726186] env[68569]: DEBUG oslo_vmware.api [None req-3e934a02-fcb4-42ae-a923-df94621043f0 
tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3166959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143258} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.726444] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.726627] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.726799] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.727035] env[68569]: INFO nova.compute.manager [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 794.727298] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.727419] env[68569]: DEBUG nova.compute.manager [-] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 794.727513] env[68569]: DEBUG nova.network.neutron [-] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.974466] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166960, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.206026] env[68569]: DEBUG nova.compute.manager [req-db34781d-662e-4840-aa69-dc06aa685727 req-925583d1-9a59-4b36-93be-c7909cb59283 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Received event network-vif-deleted-77dcd96c-5db6-4974-8c32-59be867fda30 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 795.206026] env[68569]: INFO nova.compute.manager [req-db34781d-662e-4840-aa69-dc06aa685727 req-925583d1-9a59-4b36-93be-c7909cb59283 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Neutron deleted interface 77dcd96c-5db6-4974-8c32-59be867fda30; detaching it from the instance and deleting it from the info cache [ 795.206026] env[68569]: DEBUG nova.network.neutron [req-db34781d-662e-4840-aa69-dc06aa685727 req-925583d1-9a59-4b36-93be-c7909cb59283 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.223197] env[68569]: ERROR nova.scheduler.client.report [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [req-4fdabd9f-7b64-4988-987b-1f4690f574a1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4fdabd9f-7b64-4988-987b-1f4690f574a1"}]} [ 795.239317] env[68569]: DEBUG nova.scheduler.client.report [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 795.252803] env[68569]: DEBUG nova.scheduler.client.report [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 795.253034] env[68569]: DEBUG nova.compute.provider_tree [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.267294] env[68569]: DEBUG nova.scheduler.client.report [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 795.299196] env[68569]: DEBUG nova.scheduler.client.report [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 795.479696] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166960, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.612202] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 795.613381] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633576', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'name': 'volume-fe9cc839-3100-4968-9dac-3e4881da0f59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '492c0fa1-f821-496a-86c2-f7686479a733', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'serial': 'fe9cc839-3100-4968-9dac-3e4881da0f59'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 795.614314] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490cda7b-1e1c-4090-9ac3-586d5bba85db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.638336] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7de7dc7-9acd-41f4-a17d-5bf20ee54117 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.668616] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] volume-fe9cc839-3100-4968-9dac-3e4881da0f59/volume-fe9cc839-3100-4968-9dac-3e4881da0f59.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.671344] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3051ba33-3f24-4af8-bfa1-7a4c573fe490 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.686681] env[68569]: DEBUG nova.network.neutron [-] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.693615] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 795.693615] env[68569]: value = "task-3166961" [ 795.693615] env[68569]: _type = "Task" [ 795.693615] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.705051] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166961, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.713837] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bddc8ca9-5f5d-4ce7-bedc-d65cbe11f228 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.726455] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3cbb9f-2d35-4362-900b-240c066430a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.759232] env[68569]: DEBUG nova.compute.manager [req-db34781d-662e-4840-aa69-dc06aa685727 req-925583d1-9a59-4b36-93be-c7909cb59283 service nova] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Detach interface failed, port_id=77dcd96c-5db6-4974-8c32-59be867fda30, reason: Instance eec09a1c-e8b2-4b6a-9545-e190e1f965d1 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 795.963858] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f856ca-9cd9-473e-a0bb-916a23bb833e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.983178] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4775c83b-26f5-4628-8f8b-3216df821a83 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.989669] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166960, 'name': CreateSnapshot_Task, 'duration_secs': 1.253745} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.990350] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 795.991594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35995d99-f463-4c34-904a-f82271585c27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.024340] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0328d570-b5f5-4d25-a8d9-706d6c0c1f6e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.037827] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c874768-8e48-494a-8965-7026d1f7ed86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.052980] env[68569]: DEBUG nova.compute.provider_tree [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.189432] env[68569]: INFO nova.compute.manager [-] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Took 1.46 seconds to deallocate network for instance. [ 796.203748] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166961, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.540980] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 796.541350] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-315cdd2e-6403-4da7-9e78-fe22d2df93a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.549604] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 796.549604] env[68569]: value = "task-3166962" [ 796.549604] env[68569]: _type = "Task" [ 796.549604] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.560539] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166962, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.597500] env[68569]: DEBUG nova.scheduler.client.report [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 796.597854] env[68569]: DEBUG nova.compute.provider_tree [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 85 to 86 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 796.598128] env[68569]: DEBUG nova.compute.provider_tree [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.699860] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.705228] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166961, 'name': ReconfigVM_Task, 'duration_secs': 0.969427} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.705510] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Reconfigured VM instance instance-00000031 to attach disk [datastore1] volume-fe9cc839-3100-4968-9dac-3e4881da0f59/volume-fe9cc839-3100-4968-9dac-3e4881da0f59.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.710473] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3f4f0f5-d315-4296-957d-890fb4dda0c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.725123] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 796.725123] env[68569]: value = "task-3166963" [ 796.725123] env[68569]: _type = "Task" [ 796.725123] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.734767] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166963, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.060084] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166962, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.108103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.057s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.108694] env[68569]: DEBUG nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 797.117208] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.487s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.235134] env[68569]: DEBUG oslo_vmware.api [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166963, 'name': ReconfigVM_Task, 'duration_secs': 0.306948} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.238204] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633576', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'name': 'volume-fe9cc839-3100-4968-9dac-3e4881da0f59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '492c0fa1-f821-496a-86c2-f7686479a733', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'serial': 'fe9cc839-3100-4968-9dac-3e4881da0f59'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 797.562891] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166962, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.574393] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0a9e68-baf8-4642-a42d-be459449329a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.581311] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455b9a03-b68a-4dfe-831e-1c1b6c507edb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.610379] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4a2e1c-c95c-4e71-b42d-00d6362033d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.618616] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec010a9d-4058-4370-91f0-0a177fe65110 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.623526] env[68569]: DEBUG nova.compute.utils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 797.624955] env[68569]: DEBUG nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 797.638044] env[68569]: DEBUG nova.compute.provider_tree [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.063714] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166962, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.126616] env[68569]: DEBUG nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 798.141852] env[68569]: DEBUG nova.scheduler.client.report [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.288254] env[68569]: DEBUG nova.objects.instance [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'flavor' on Instance uuid 492c0fa1-f821-496a-86c2-f7686479a733 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.566335] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166962, 'name': CloneVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.648477] env[68569]: DEBUG oslo_concurrency.lockutils [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.533s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.648760] env[68569]: INFO nova.compute.manager [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Successfully reverted task state from resize_migrating on failure for instance. 
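The 409 placement.concurrent_update error recorded earlier in this excerpt, followed by "Refreshing inventories ..." and a later successful update that bumps the provider generation (85 to 86), reflects a compare-and-swap style protocol: an inventory PUT carries the provider generation the writer last saw, and a stale generation is rejected so the writer must refresh and retry. The sketch below illustrates that retry loop against a hypothetical client; get_inventory and put_inventory are stand-ins, not the real Placement API or Nova's report client.

def set_inventory_with_retry(client, provider_uuid, new_inventory, max_attempts=3):
    # Compare-and-swap loop: refresh the provider generation, attempt the
    # update, and retry on a 409 generation conflict (another writer won).
    for _attempt in range(max_attempts):
        generation, _current = client.get_inventory(provider_uuid)
        status = client.put_inventory(provider_uuid, generation, new_inventory)
        if status == 200:
            return True
        if status != 409:
            raise RuntimeError(f"unexpected status {status}")
    return False

class _FakeClient:
    # Stand-in client: the first PUT conflicts (a concurrent writer bumped
    # the generation), the retry succeeds.
    def __init__(self):
        self._gen = 85
        self._conflict_once = True
    def get_inventory(self, uuid):
        return self._gen, {}
    def put_inventory(self, uuid, generation, inventory):
        if self._conflict_once:
            self._conflict_once = False
            self._gen += 1
            return 409
        if generation != self._gen:
            return 409
        self._gen += 1
        return 200

print(set_inventory_with_retry(
    _FakeClient(),
    "a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6",
    {"DISK_GB": {"total": 400}}))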
[ 798.655903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.955s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.656168] env[68569]: DEBUG nova.objects.instance [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lazy-loading 'resources' on Instance uuid 912303de-a79d-41b0-ab44-c79e850a4dee {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server [None req-46ebe739-b54f-46c0-b2ef-e603d32e0414 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 798.660486] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 798.661150] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 798.661647] env[68569]: ERROR oslo_messaging.rpc.server [ 798.793708] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9775662b-2a78-4214-9012-3b0b4c596617 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.307s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.016047] 
env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "492c0fa1-f821-496a-86c2-f7686479a733" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.016047] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.016047] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "492c0fa1-f821-496a-86c2-f7686479a733-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.016473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.016473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.018713] env[68569]: INFO nova.compute.manager [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Terminating instance [ 799.066281] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166962, 'name': CloneVM_Task, 'duration_secs': 2.020006} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.066569] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Created linked-clone VM from snapshot [ 799.067347] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5d1aab-c7aa-4a96-ba45-aad4812e231b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.075051] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Uploading image 4d6b79e1-3973-463a-9f75-f85600bf2bbf {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 799.087506] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 799.087777] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c0c2faec-3626-42fa-a7d5-91b98d95beeb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.094231] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 799.094231] env[68569]: value = "task-3166964" [ 799.094231] env[68569]: _type = "Task" [ 799.094231] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.109871] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166964, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.139530] env[68569]: DEBUG nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 799.175142] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 799.175388] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.175737] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 799.175737] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.175878] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 799.175981] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 799.177186] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 799.177376] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 799.177551] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 
tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 799.177714] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 799.178114] env[68569]: DEBUG nova.virt.hardware [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 799.182017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b109949-e22e-4133-87a5-51e2e6934b47 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.187507] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35401ab6-166e-43d1-9177-830c489fc88d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.209531] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.215252] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Creating folder: Project (ef25c172cdd84b79a1fa5e57b804c510). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 799.218231] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e5489fd-0367-4f7d-819d-3ee9e2cc7e58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.230500] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Created folder: Project (ef25c172cdd84b79a1fa5e57b804c510) in parent group-v633430. [ 799.230500] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Creating folder: Instances. Parent ref: group-v633579. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 799.230500] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a37b7bb6-e4ef-47a0-a73d-8ea6c569238b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.239164] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Created folder: Instances in parent group-v633579. [ 799.239488] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 799.242592] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.243944] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c445ac1e-79df-4798-afcb-0d26c23373cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.263851] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.263851] env[68569]: value = "task-3166967" [ 799.263851] env[68569]: _type = "Task" [ 799.263851] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.271246] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166967, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.522736] env[68569]: DEBUG nova.compute.manager [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 799.523279] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.523279] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da3bc347-8fb6-4a99-bd6f-abba80617b25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.532826] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 799.532826] env[68569]: value = "task-3166968" [ 799.532826] env[68569]: _type = "Task" [ 799.532826] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.546934] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.556173] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.556405] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.606216] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166964, 'name': Destroy_Task, 'duration_secs': 0.307086} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.608950] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Destroyed the VM [ 799.609263] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 799.610488] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0602bebd-1693-44ec-806c-9682af92d893 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.617139] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 799.617139] env[68569]: value = "task-3166969" [ 799.617139] env[68569]: _type = "Task" [ 799.617139] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.628695] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166969, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.674652] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191a4c60-baed-4691-860c-c138b3b555c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.682408] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ae610b-3e46-4cdc-ab74-de07407ccebb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.718226] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9cbe7f-4bdc-426b-90c2-006e136147ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.726526] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8606ab7d-8453-4c19-baf5-453a9d577bc8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.743127] env[68569]: DEBUG nova.compute.provider_tree [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.772451] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166967, 'name': CreateVM_Task, 'duration_secs': 0.321381} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.772662] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.773202] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.773379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.773871] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 799.774283] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ee9069a-714e-4513-ab81-db7b15dd9c0b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.778830] env[68569]: DEBUG 
oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 799.778830] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c8e37a-fd6f-6c9b-aee6-cc2771c01c46" [ 799.778830] env[68569]: _type = "Task" [ 799.778830] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.787372] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c8e37a-fd6f-6c9b-aee6-cc2771c01c46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.046057] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166968, 'name': PowerOffVM_Task, 'duration_secs': 0.233755} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.046339] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.046972] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 800.046972] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633576', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'name': 'volume-fe9cc839-3100-4968-9dac-3e4881da0f59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '492c0fa1-f821-496a-86c2-f7686479a733', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'serial': 'fe9cc839-3100-4968-9dac-3e4881da0f59'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 800.047551] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35af32d7-72e9-4edc-841d-1f978e7176d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.079288] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbab66e-dad9-48fa-a09d-70cce676cd75 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.082826] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.083072] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.083072] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.083376] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.083376] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.084552] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.084552] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 800.084552] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.087412] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00338efa-b35d-4a82-b8a0-bbc07dff0cef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.110594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd73d2c-bf8a-477b-ba12-7b0837cddea2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.129985] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] The volume has not been displaced from its original location: [datastore1] volume-fe9cc839-3100-4968-9dac-3e4881da0f59/volume-fe9cc839-3100-4968-9dac-3e4881da0f59.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 800.135564] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Reconfiguring VM instance instance-00000031 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 800.139050] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81b72132-4d07-44eb-87e6-446b2cb48507 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.164260] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166969, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.165871] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 800.165871] env[68569]: value = "task-3166970" [ 800.165871] env[68569]: _type = "Task" [ 800.165871] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.174324] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166970, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.247957] env[68569]: DEBUG nova.scheduler.client.report [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.294483] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c8e37a-fd6f-6c9b-aee6-cc2771c01c46, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.294913] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.295229] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.295539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.295742] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.295974] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.296852] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae82790e-e652-4e8a-b0c6-a3b64436535c {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.309561] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.309561] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.310209] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6937d374-58d7-42dd-936a-01834423824a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.316506] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 800.316506] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52010221-888f-a18e-c5c2-792261a7489c" [ 800.316506] env[68569]: _type = "Task" [ 800.316506] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.326972] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52010221-888f-a18e-c5c2-792261a7489c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.588072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.639796] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166969, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.677201] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166970, 'name': ReconfigVM_Task, 'duration_secs': 0.294021} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.677201] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Reconfigured VM instance instance-00000031 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 800.681742] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7761281e-6d3b-4f09-bb8a-2696808a7bc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.697394] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 800.697394] env[68569]: value = "task-3166971" [ 800.697394] env[68569]: _type = "Task" [ 800.697394] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.706123] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.754032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.755908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.423s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.757936] env[68569]: INFO nova.compute.claims [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.782912] env[68569]: INFO nova.scheduler.client.report [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Deleted allocations for instance 912303de-a79d-41b0-ab44-c79e850a4dee [ 800.827938] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52010221-888f-a18e-c5c2-792261a7489c, 'name': SearchDatastore_Task, 'duration_secs': 0.019561} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.829279] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-822a59f6-6d54-4d3d-b5e5-cb0ae3fc17cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.834980] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 800.834980] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529e4c84-da6e-f3d3-6c66-06a7a93c2bea" [ 800.834980] env[68569]: _type = "Task" [ 800.834980] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.843429] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529e4c84-da6e-f3d3-6c66-06a7a93c2bea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.139622] env[68569]: DEBUG oslo_vmware.api [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166969, 'name': RemoveSnapshot_Task, 'duration_secs': 1.261131} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.139866] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 801.207874] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166971, 'name': ReconfigVM_Task, 'duration_secs': 0.222507} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.208164] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633576', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'name': 'volume-fe9cc839-3100-4968-9dac-3e4881da0f59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '492c0fa1-f821-496a-86c2-f7686479a733', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe9cc839-3100-4968-9dac-3e4881da0f59', 'serial': 'fe9cc839-3100-4968-9dac-3e4881da0f59'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 801.208435] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.209249] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe073f1-9f5d-4947-bf92-56c107e4d377 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.215886] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.215994] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d36c7c6-35a3-450d-b9e2-564c7fb00777 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.272122] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.272248] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.272458] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleting the datastore file [datastore2] 492c0fa1-f821-496a-86c2-f7686479a733 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.273107] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0bd6a12-32be-4e1a-a450-6253485bfc0a {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.281386] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 801.281386] env[68569]: value = "task-3166973" [ 801.281386] env[68569]: _type = "Task" [ 801.281386] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.293012] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166973, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.293465] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7c0c8a77-d9c9-4a5f-869b-3a058ef791bb tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "912303de-a79d-41b0-ab44-c79e850a4dee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.086s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.346683] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529e4c84-da6e-f3d3-6c66-06a7a93c2bea, 'name': SearchDatastore_Task, 'duration_secs': 0.015306} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.346997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.347306] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.347628] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b607944d-7c3f-4566-811b-0d969d5e0625 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.355657] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 801.355657] env[68569]: value = "task-3166974" [ 801.355657] env[68569]: _type = "Task" [ 801.355657] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.363908] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.644358] env[68569]: WARNING nova.compute.manager [None req-d79d8295-346b-4b79-8001-9d3259d03799 tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Image not found during snapshot: nova.exception.ImageNotFound: Image 4d6b79e1-3973-463a-9f75-f85600bf2bbf could not be found. [ 801.794278] env[68569]: DEBUG oslo_vmware.api [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3166973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156372} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.794976] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 801.795220] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 801.795776] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.795776] env[68569]: INFO nova.compute.manager [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Took 2.27 seconds to destroy the instance on the hypervisor. [ 801.795776] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 801.795985] env[68569]: DEBUG nova.compute.manager [-] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 801.796070] env[68569]: DEBUG nova.network.neutron [-] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.869279] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166974, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.286883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.287149] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.287351] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.287522] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.287677] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.293212] env[68569]: INFO nova.compute.manager [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 
1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Terminating instance [ 802.345460] env[68569]: DEBUG nova.compute.manager [req-d89b37db-af60-4164-a082-8152c7160463 req-22b0a3a8-928b-4dab-99d0-f281971d4af4 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Received event network-vif-deleted-9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 802.345827] env[68569]: INFO nova.compute.manager [req-d89b37db-af60-4164-a082-8152c7160463 req-22b0a3a8-928b-4dab-99d0-f281971d4af4 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Neutron deleted interface 9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0; detaching it from the instance and deleting it from the info cache [ 802.346189] env[68569]: DEBUG nova.network.neutron [req-d89b37db-af60-4164-a082-8152c7160463 req-22b0a3a8-928b-4dab-99d0-f281971d4af4 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.372173] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644868} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.373378] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 802.373625] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.375079] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf2f48e-18de-4b1f-9fbe-2d2353226bdc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.377575] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c25388d-21a7-418a-967c-df580252aab5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.384252] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063e2e30-0de3-4d5b-89b1-66b100aece2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.389332] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 802.389332] env[68569]: value = "task-3166975" [ 802.389332] env[68569]: _type = "Task" [ 802.389332] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.421919] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409dba50-687f-47a9-8355-032d8bf8dcb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.427291] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166975, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.433336] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b2014c-e83c-4af6-bf26-965b246bc8f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.447351] env[68569]: DEBUG nova.compute.provider_tree [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.759487] env[68569]: DEBUG nova.network.neutron [-] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.799417] env[68569]: DEBUG nova.compute.manager [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.799672] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.800539] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67bf52e-2b09-4c47-91f4-7a9fdd75d0f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.808427] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.808651] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91749c95-9a20-46f6-8848-651871a32156 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.814409] env[68569]: DEBUG oslo_vmware.api [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 802.814409] env[68569]: value = "task-3166976" [ 802.814409] env[68569]: _type = "Task" [ 802.814409] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.821835] env[68569]: DEBUG oslo_vmware.api [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166976, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.849747] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c664bc7a-e287-40dc-adac-0474c120171d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.858166] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c646ee05-d12e-4bd5-9f4f-2d773c0deb91 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.888849] env[68569]: DEBUG nova.compute.manager [req-d89b37db-af60-4164-a082-8152c7160463 req-22b0a3a8-928b-4dab-99d0-f281971d4af4 service nova] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Detach interface failed, port_id=9e78b86b-e16d-4f1b-88b0-67d95aa0dfb0, reason: Instance 492c0fa1-f821-496a-86c2-f7686479a733 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 802.898099] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065355} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.898365] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.899126] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f36cd8a-0544-4f53-bbbd-311fbcacbe69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.918844] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.919165] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2d096d4-5192-4d34-b5ec-2ab9f5d6c118 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.938209] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 802.938209] env[68569]: value = "task-3166977" [ 802.938209] env[68569]: _type = "Task" [ 802.938209] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.945997] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166977, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.951998] env[68569]: DEBUG nova.scheduler.client.report [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.261762] env[68569]: INFO nova.compute.manager [-] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Took 1.47 seconds to deallocate network for instance. 
[ 803.327650] env[68569]: DEBUG oslo_vmware.api [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166976, 'name': PowerOffVM_Task, 'duration_secs': 0.191702} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.328333] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.328552] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.328819] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b4b189f-a506-4788-a19d-ff374131778f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.394063] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.394642] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.394970] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleting the datastore file [datastore1] 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.395419] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-623ca86d-954d-4255-b476-2e9cdb5bc1c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.403314] env[68569]: DEBUG oslo_vmware.api [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 803.403314] env[68569]: value = "task-3166979" [ 803.403314] env[68569]: _type = "Task" [ 803.403314] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.411219] env[68569]: DEBUG oslo_vmware.api [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.449915] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166977, 'name': ReconfigVM_Task, 'duration_secs': 0.2696} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.449915] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.450872] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b00a32b-df60-4246-b5eb-2de955935d25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.458638] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 803.458638] env[68569]: value = "task-3166980" [ 803.458638] env[68569]: _type = "Task" [ 803.458638] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.462476] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.463729] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.466899] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.725s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.467195] env[68569]: DEBUG nova.objects.instance [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lazy-loading 'resources' on Instance uuid ad207187-634f-4e7f-9809-eb3f742ddeec {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.474213] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166980, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.822136] env[68569]: INFO nova.compute.manager [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Took 0.56 seconds to detach 1 volumes for instance. [ 803.915964] env[68569]: DEBUG oslo_vmware.api [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3166979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149292} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.915964] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.915964] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.915964] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.915964] env[68569]: INFO nova.compute.manager [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 803.916489] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.916489] env[68569]: DEBUG nova.compute.manager [-] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.916489] env[68569]: DEBUG nova.network.neutron [-] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.969163] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166980, 'name': Rename_Task, 'duration_secs': 0.129074} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.969163] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.969163] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3135f868-fbfc-4b4c-b053-91931402e762 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.975172] env[68569]: DEBUG nova.compute.utils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 803.976963] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.979631] env[68569]: DEBUG nova.network.neutron [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.989666] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 803.989666] env[68569]: value = "task-3166981" [ 803.989666] env[68569]: _type = "Task" [ 803.989666] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.003395] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.048524] env[68569]: DEBUG nova.policy [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6198db2a02d4f2980f133f13bafd49e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1de4680b5844def8ff462ea7ef6a25c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 804.327088] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.426155] env[68569]: DEBUG nova.compute.manager [req-cde782f6-2ecf-4ceb-86df-7283ceac0c98 req-be284610-d49c-48b7-8d12-6df76b7c1646 service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Received event network-vif-deleted-aa644f82-a73f-492b-9432-6fce0cc9def1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 804.426155] env[68569]: INFO nova.compute.manager [req-cde782f6-2ecf-4ceb-86df-7283ceac0c98 req-be284610-d49c-48b7-8d12-6df76b7c1646 service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Neutron deleted interface aa644f82-a73f-492b-9432-6fce0cc9def1; detaching it from the instance and deleting it from the info cache [ 804.426155] env[68569]: DEBUG nova.network.neutron [req-cde782f6-2ecf-4ceb-86df-7283ceac0c98 req-be284610-d49c-48b7-8d12-6df76b7c1646 service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.484568] env[68569]: DEBUG nova.compute.utils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 804.504439] env[68569]: DEBUG oslo_vmware.api [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166981, 'name': PowerOnVM_Task, 'duration_secs': 0.421102} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.505965] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 804.506164] env[68569]: INFO nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Took 5.37 seconds to spawn the instance on the hypervisor. [ 804.506339] env[68569]: DEBUG nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 804.507189] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbbf200-c2d1-4ad2-84d2-cc9e62ed0778 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.525582] env[68569]: DEBUG nova.network.neutron [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Successfully created port: 9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.592026] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fb76fb-4d6f-4d66-8322-3c4e9d22ddea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.598238] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fc5c09-59ca-4cbf-a30e-11288d968649 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.631864] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42644838-8a37-430a-b187-d48c0d7798cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.640583] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7fac7a-b6d5-425b-b938-9e28228b1757 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.656316] env[68569]: DEBUG nova.compute.provider_tree [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.755371] env[68569]: DEBUG nova.network.neutron [-] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.935363] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d80e963-8b96-4bb9-b9d2-325d4a0a22ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.947544] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32078648-518d-41dd-bb94-6167246687d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.977592] env[68569]: DEBUG nova.compute.manager [req-cde782f6-2ecf-4ceb-86df-7283ceac0c98 req-be284610-d49c-48b7-8d12-6df76b7c1646 service nova] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Detach interface failed, port_id=aa644f82-a73f-492b-9432-6fce0cc9def1, reason: Instance 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 804.987452] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 805.035334] env[68569]: INFO nova.compute.manager [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Took 50.64 seconds to build instance. [ 805.162020] env[68569]: DEBUG nova.scheduler.client.report [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.259375] env[68569]: INFO nova.compute.manager [-] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Took 1.34 seconds to deallocate network for instance. 
[ 805.538730] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5800bada-25ff-4519-bd15-47f499c06324 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "50abc994-682a-40d6-ae77-601839b98793" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 82.428s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.666430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.668715] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.129s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.669219] env[68569]: DEBUG nova.objects.instance [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lazy-loading 'resources' on Instance uuid 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.696468] env[68569]: INFO nova.scheduler.client.report [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Deleted allocations for instance ad207187-634f-4e7f-9809-eb3f742ddeec [ 805.741320] env[68569]: INFO nova.compute.manager [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Rebuilding instance [ 805.765799] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.784583] env[68569]: DEBUG nova.compute.manager [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.785474] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae15d6e-7f53-4a4f-ba61-17110186bff4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.003461] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Start 
spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 806.035849] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:50:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='30905327',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1309280867',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 806.035849] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.035849] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 806.036098] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.036098] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 806.036400] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 806.036735] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 806.037113] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 
tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 806.037373] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 806.037892] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 806.037964] env[68569]: DEBUG nova.virt.hardware [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 806.039020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27451511-f2ac-49f6-b35c-b75c6034d801 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.042809] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 806.054226] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecba8590-6d12-4c24-b89d-397523707854 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.206109] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c163201e-84e5-4caf-b26f-383a25e285b6 tempest-AttachInterfacesUnderV243Test-1529000804 tempest-AttachInterfacesUnderV243Test-1529000804-project-member] Lock "ad207187-634f-4e7f-9809-eb3f742ddeec" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 51.730s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.250054] env[68569]: DEBUG nova.compute.manager [req-f8440433-5982-41c0-bf5e-d9cd5c5e9305 req-65a728fa-ccb6-4135-aabd-a79529b60537 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Received event network-vif-plugged-9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 806.250306] env[68569]: DEBUG oslo_concurrency.lockutils [req-f8440433-5982-41c0-bf5e-d9cd5c5e9305 req-65a728fa-ccb6-4135-aabd-a79529b60537 service nova] Acquiring lock "53cc8dbd-c163-403a-9286-e1f8ad939f94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.250549] env[68569]: DEBUG oslo_concurrency.lockutils [req-f8440433-5982-41c0-bf5e-d9cd5c5e9305 req-65a728fa-ccb6-4135-aabd-a79529b60537 service nova] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.250785] env[68569]: DEBUG oslo_concurrency.lockutils [req-f8440433-5982-41c0-bf5e-d9cd5c5e9305 req-65a728fa-ccb6-4135-aabd-a79529b60537 service nova] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.250996] env[68569]: DEBUG nova.compute.manager [req-f8440433-5982-41c0-bf5e-d9cd5c5e9305 req-65a728fa-ccb6-4135-aabd-a79529b60537 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] No waiting events found dispatching network-vif-plugged-9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 806.251228] env[68569]: WARNING nova.compute.manager [req-f8440433-5982-41c0-bf5e-d9cd5c5e9305 req-65a728fa-ccb6-4135-aabd-a79529b60537 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Received unexpected event network-vif-plugged-9ecb2363-68e3-455a-8b8f-db5226a52abf for instance with vm_state building and task_state spawning. 
[ 806.423610] env[68569]: DEBUG nova.network.neutron [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Successfully updated port: 9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.563396] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.685025] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae01696d-b7d5-44e9-b79b-c2770f89adf7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.696299] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd336dd4-caf5-4ee9-90f8-60c257a8bb4c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.729350] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0388d9eb-f276-4973-bd4c-d5f618eb796e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.737097] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c54ba61-d37f-4818-bbec-d43fe9abfdb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.756987] env[68569]: DEBUG nova.compute.provider_tree [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.805734] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 806.806262] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37349524-37fd-42e8-96f3-07da0a74c00e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.813052] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 806.813052] env[68569]: value = "task-3166985" [ 806.813052] env[68569]: _type = "Task" [ 806.813052] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.821373] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.931418] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.931418] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.931418] env[68569]: DEBUG nova.network.neutron [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.260561] env[68569]: DEBUG nova.scheduler.client.report [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.322911] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166985, 'name': PowerOffVM_Task, 'duration_secs': 0.176539} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.323345] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.323740] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.325185] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d622e6f9-01be-4d05-a471-adb677e72893 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.334229] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.334784] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8813296-6615-4a9f-9e1e-02c97365ee25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.361200] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.361466] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.361658] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Deleting the datastore file [datastore1] 50abc994-682a-40d6-ae77-601839b98793 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.361936] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06302cab-50b2-4dab-beef-ea38972cfc16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.369851] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 807.369851] env[68569]: value = "task-3166987" [ 807.369851] env[68569]: _type = "Task" [ 807.369851] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.377641] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.488854] env[68569]: DEBUG nova.network.neutron [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.693743] env[68569]: DEBUG nova.network.neutron [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updating instance_info_cache with network_info: [{"id": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "address": "fa:16:3e:5e:79:75", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecb2363-68", "ovs_interfaceid": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.768809] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.768809] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.169s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.768809] env[68569]: DEBUG nova.objects.instance [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lazy-loading 'resources' on Instance 
uuid 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.798354] env[68569]: INFO nova.scheduler.client.report [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Deleted allocations for instance 925400c4-3b33-4f4a-9f63-3ceec06cf0b7 [ 807.883312] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119634} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.883617] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.883872] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.884104] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.198384] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Releasing lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.199374] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Instance network_info: |[{"id": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "address": "fa:16:3e:5e:79:75", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecb2363-68", "ovs_interfaceid": "9ecb2363-68e3-455a-8b8f-db5226a52abf", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 808.199599] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:79:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ecb2363-68e3-455a-8b8f-db5226a52abf', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.212477] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.212617] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.214729] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9cbcac2-d3d8-4104-9f9f-b66b78bf7f74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.233898] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.233898] env[68569]: value = "task-3166988" [ 808.233898] env[68569]: _type = "Task" [ 808.233898] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.243877] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166988, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.287311] env[68569]: DEBUG nova.compute.manager [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Received event network-changed-9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 808.287675] env[68569]: DEBUG nova.compute.manager [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Refreshing instance network info cache due to event network-changed-9ecb2363-68e3-455a-8b8f-db5226a52abf. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 808.287836] env[68569]: DEBUG oslo_concurrency.lockutils [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] Acquiring lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.288029] env[68569]: DEBUG oslo_concurrency.lockutils [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] Acquired lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.288242] env[68569]: DEBUG nova.network.neutron [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Refreshing network info cache for port 9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.309480] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0bf7bb2f-f078-4d81-ae12-65dbe33d6127 tempest-MigrationsAdminTest-375800074 tempest-MigrationsAdminTest-375800074-project-member] Lock "925400c4-3b33-4f4a-9f63-3ceec06cf0b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.297s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.704192] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0131691e-f51f-4a75-a106-89dabe6af55b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.715796] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a767b2db-69ee-4c6a-9a76-46c52b3778d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.756951] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851c18c8-2a19-4437-8c13-6f1fc332de53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.770436] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166988, 'name': CreateVM_Task, 'duration_secs': 0.302831} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.773072] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 808.773929] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.774112] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.774509] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 808.775756] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166c74e7-363c-4c65-8343-967c51f4a863 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.779985] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99938df4-6460-47db-88a6-67f0b1bfd928 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.797965] env[68569]: DEBUG nova.compute.provider_tree [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.799529] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 808.799529] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523700d5-0689-6ae7-8935-a4ff9073bfa6" [ 808.799529] env[68569]: _type = "Task" [ 808.799529] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.807644] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523700d5-0689-6ae7-8935-a4ff9073bfa6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.923290] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.923575] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.923735] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.923913] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.924068] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.924223] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.924444] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.924599] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.924761] env[68569]: DEBUG nova.virt.hardware [None 
req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.924917] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.925101] env[68569]: DEBUG nova.virt.hardware [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.925987] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264883c2-2ca7-4676-b619-06fdd7a3c6f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.933395] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141e0ae7-014a-4443-b026-9f64d658721e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.947244] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.952763] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.955418] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.955568] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d08c1ba2-5e9d-42a0-befa-d4fba135321e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.973733] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.973733] env[68569]: value = "task-3166990" [ 808.973733] env[68569]: _type = "Task" [ 808.973733] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.982420] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166990, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.088732] env[68569]: DEBUG nova.network.neutron [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updated VIF entry in instance network info cache for port 9ecb2363-68e3-455a-8b8f-db5226a52abf. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 809.089329] env[68569]: DEBUG nova.network.neutron [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updating instance_info_cache with network_info: [{"id": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "address": "fa:16:3e:5e:79:75", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecb2363-68", "ovs_interfaceid": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.302968] env[68569]: DEBUG nova.scheduler.client.report [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 809.323872] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523700d5-0689-6ae7-8935-a4ff9073bfa6, 'name': SearchDatastore_Task, 'duration_secs': 0.033175} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.324352] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.324605] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.324880] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.325078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.325303] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.325977] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e89dbe7-0c95-428b-a1c1-2d2658ab094c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.340977] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.341198] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.341995] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0037b093-601d-4f70-a9d3-63849f58a81a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.349093] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 809.349093] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b53fb7-cb29-87c4-a291-e9f3bd082883" [ 809.349093] env[68569]: _type = "Task" [ 809.349093] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.358167] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b53fb7-cb29-87c4-a291-e9f3bd082883, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.483523] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3166990, 'name': CreateVM_Task, 'duration_secs': 0.359154} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.484329] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.484329] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.484329] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.484589] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 809.484847] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-858441ad-1c90-4b6b-bcd4-4dc0c2c8a6ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.489042] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 
809.489042] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524d78cd-c438-5fd4-1fcc-9377c252c9ba" [ 809.489042] env[68569]: _type = "Task" [ 809.489042] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.496368] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524d78cd-c438-5fd4-1fcc-9377c252c9ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.592851] env[68569]: DEBUG oslo_concurrency.lockutils [req-9bb8f841-06b0-468e-8d80-462fa56f2593 req-6986fd65-6f5a-46d8-93c2-111d7a0f4581 service nova] Releasing lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.816009] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.818531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.298s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.818779] env[68569]: DEBUG nova.objects.instance [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lazy-loading 'resources' on Instance uuid 39a84212-2e52-4dba-b00c-5689564deaf4 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.838287] env[68569]: INFO nova.scheduler.client.report [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Deleted allocations for instance 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0 [ 809.861019] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b53fb7-cb29-87c4-a291-e9f3bd082883, 'name': SearchDatastore_Task, 'duration_secs': 0.05017} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.861943] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d7034af-a7be-4892-82b2-aee46b3a52f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.867599] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 809.867599] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527e622a-f6a1-80ba-ca8c-7c5a076e94b8" [ 809.867599] env[68569]: _type = "Task" [ 809.867599] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.876404] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527e622a-f6a1-80ba-ca8c-7c5a076e94b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.000756] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524d78cd-c438-5fd4-1fcc-9377c252c9ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010669} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.001071] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.001309] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.001542] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.349089] env[68569]: DEBUG oslo_concurrency.lockutils [None req-91f83ba8-3d3b-4b7e-8eba-decaf1fa4736 tempest-InstanceActionsTestJSON-569561072 tempest-InstanceActionsTestJSON-569561072-project-member] Lock "9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.699s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
810.386020] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527e622a-f6a1-80ba-ca8c-7c5a076e94b8, 'name': SearchDatastore_Task, 'duration_secs': 0.00883} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.386020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.386020] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94/53cc8dbd-c163-403a-9286-e1f8ad939f94.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.386020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.386499] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.386499] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c0f8d9e-a6f1-4fe6-8744-1976b1164606 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.386499] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c7b657a-d5ba-47e1-9181-6b074bdeb68f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.397175] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 810.397175] env[68569]: value = "task-3166991" [ 810.397175] env[68569]: _type = "Task" [ 810.397175] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.398362] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.399437] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.404025] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bc7a951-11ed-488e-bff8-b610fdc6a2d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.416910] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166991, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.420012] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 810.420012] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5209a173-63fd-a71d-6319-c01e779a4eaf" [ 810.420012] env[68569]: _type = "Task" [ 810.420012] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.431845] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5209a173-63fd-a71d-6319-c01e779a4eaf, 'name': SearchDatastore_Task, 'duration_secs': 0.008889} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.431845] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daec378d-fa85-4ad3-abe0-362aea8f5096 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.437146] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 810.437146] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5273901a-ded4-896e-5b48-2d8752fedf2a" [ 810.437146] env[68569]: _type = "Task" [ 810.437146] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.445471] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5273901a-ded4-896e-5b48-2d8752fedf2a, 'name': SearchDatastore_Task, 'duration_secs': 0.007283} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.448042] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.448320] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.448774] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23772a5e-a2be-4b4e-9597-bb81b644a8c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.457239] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 810.457239] env[68569]: value = "task-3166992" [ 810.457239] env[68569]: _type = "Task" [ 810.457239] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.465425] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166992, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.786848] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b679f1b9-6a2a-4124-92cc-ebae80c493d2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.796469] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609cd200-4971-4165-89ed-7863a0105e3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.830985] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b82f7a4-04c8-49dd-973c-745630bd8079 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.840161] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c39b0af-02e4-4b8d-ba91-35af3e3eb774 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.856739] env[68569]: DEBUG nova.compute.provider_tree [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.910024] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166991, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480837} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.910024] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94/53cc8dbd-c163-403a-9286-e1f8ad939f94.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.910024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.910309] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee5c6b03-0136-4a55-a209-f032e04fb8f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.920438] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 810.920438] env[68569]: value = "task-3166993" [ 810.920438] env[68569]: _type = "Task" [ 810.920438] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.929202] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166993, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.967413] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166992, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.361141] env[68569]: DEBUG nova.scheduler.client.report [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.434482] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090213} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.434821] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.435843] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0cc265f-9af1-40be-9b97-56239917135f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.468945] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94/53cc8dbd-c163-403a-9286-e1f8ad939f94.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.473997] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-522d6830-d76e-49cf-b2bd-c858e56a0049 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.496068] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166992, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732558} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.497375] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.497837] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.498103] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 811.498103] env[68569]: value = "task-3166995" [ 811.498103] env[68569]: _type = "Task" [ 811.498103] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.498281] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20b68f13-a1ad-4301-8173-039edbc1bbc7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.508367] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166995, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.509367] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 811.509367] env[68569]: value = "task-3166996" [ 811.509367] env[68569]: _type = "Task" [ 811.509367] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.518823] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166996, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.869196] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.873174] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.137s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.875970] env[68569]: INFO nova.compute.claims [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.899940] env[68569]: INFO nova.scheduler.client.report [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Deleted allocations for instance 39a84212-2e52-4dba-b00c-5689564deaf4 [ 812.011318] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166995, 'name': ReconfigVM_Task, 'duration_secs': 0.260004} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.016186] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94/53cc8dbd-c163-403a-9286-e1f8ad939f94.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.016186] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68569) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 812.016862] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-07506db5-0f54-41d3-aca8-b3a3c761af33 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.023876] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06582} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.025971] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.026365] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 812.026365] env[68569]: value = "task-3166997" [ 812.026365] env[68569]: _type = "Task" [ 812.026365] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.027082] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f72320d-f935-46e3-a5d5-da218303026f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.053195] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.057205] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3994f34e-1a5f-4407-9f98-d7d61c30f25a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.079432] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166997, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.084045] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 812.084045] env[68569]: value = "task-3166998" [ 812.084045] env[68569]: _type = "Task" [ 812.084045] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.094183] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166998, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.408024] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4809d318-2b94-4d11-a9b8-2a2ef89215b4 tempest-FloatingIPsAssociationTestJSON-2126733400 tempest-FloatingIPsAssociationTestJSON-2126733400-project-member] Lock "39a84212-2e52-4dba-b00c-5689564deaf4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.337s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.543057] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166997, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.064315} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.543811] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68569) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 812.544160] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f3cfa0-b7cb-4453-bce9-389a029206d2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.569831] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94/ephemeral_0.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.570186] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33763849-eeb7-4794-813c-d7325ba368e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.589364] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 812.589364] env[68569]: value = "task-3166999" [ 812.589364] env[68569]: _type = "Task" [ 812.589364] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.595648] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3166998, 'name': ReconfigVM_Task, 'duration_secs': 0.308148} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.596116] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.596727] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7a6eb89-61de-4065-96f9-a3ce0520e293 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.601374] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166999, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.607716] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 812.607716] env[68569]: value = "task-3167000" [ 812.607716] env[68569]: _type = "Task" [ 812.607716] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.617007] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167000, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.109161] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166999, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.118396] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167000, 'name': Rename_Task, 'duration_secs': 0.128253} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.119750] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.119750] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e55c72c-3641-4bf9-9d8b-68db69bc9df1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.130610] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 813.130610] env[68569]: value = "task-3167002" [ 813.130610] env[68569]: _type = "Task" [ 813.130610] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.138353] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167002, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.404039] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b866e4b-f72d-4056-ad7c-3cf8af5ed7d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.413339] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892313ed-5f63-4ad5-8e7c-df21993aa00e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.447839] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f3533f-087e-4f40-b4a9-b35a7cdde7b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.456340] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a69c61-05d0-4290-978d-c1db164f3ba0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.471653] env[68569]: DEBUG nova.compute.provider_tree [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.603711] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3166999, 'name': ReconfigVM_Task, 'duration_secs': 0.540365} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.604163] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94/ephemeral_0.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.605178] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96cd2a52-05aa-4d2a-8e3b-2544dfb219bc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.612148] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 813.612148] env[68569]: value = "task-3167003" [ 813.612148] env[68569]: _type = "Task" [ 813.612148] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.622138] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167003, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.640483] env[68569]: DEBUG oslo_vmware.api [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167002, 'name': PowerOnVM_Task, 'duration_secs': 0.430116} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.640906] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.640997] env[68569]: DEBUG nova.compute.manager [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.641882] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5db1f23-2e20-466f-ad61-e636d9c1910a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.974891] env[68569]: DEBUG nova.scheduler.client.report [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 814.124301] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167003, 'name': Rename_Task, 'duration_secs': 0.403794} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.124301] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.124493] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f5bc7fe-1c94-4fb9-a5dd-8a8e34857d90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.131054] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 814.131054] env[68569]: value = "task-3167004" [ 814.131054] env[68569]: _type = "Task" [ 814.131054] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.143402] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167004, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.160460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.484319] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.484319] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 814.489404] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.730s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.491571] env[68569]: INFO nova.compute.claims [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.638835] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "61aa0997-ffa6-4551-bdaa-132026e240f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.639133] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.646239] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 
tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167004, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.996997] env[68569]: DEBUG nova.compute.utils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 815.004056] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 815.004056] env[68569]: DEBUG nova.network.neutron [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 815.047473] env[68569]: INFO nova.compute.manager [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Rebuilding instance [ 815.051522] env[68569]: DEBUG nova.policy [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0fc0aab70b841d991610ba829cc6660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d628075adbb43b8a572072277c25741', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 815.103322] env[68569]: DEBUG nova.compute.manager [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.105163] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fc4b64-9604-42d0-afde-4be40651be9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.144501] env[68569]: DEBUG oslo_vmware.api [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167004, 'name': PowerOnVM_Task, 'duration_secs': 0.51448} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.144791] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.144990] env[68569]: INFO nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Took 9.14 seconds to spawn the instance on the hypervisor. [ 815.145185] env[68569]: DEBUG nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.145964] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee032fba-726e-43c8-b39a-ed8f69479e77 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.403287] env[68569]: DEBUG nova.network.neutron [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Successfully created port: 3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.503333] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.666726] env[68569]: INFO nova.compute.manager [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Took 57.35 seconds to build instance. 
[ 815.980853] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e80b79-8465-4208-aeb3-365c5397f83c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.988298] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb10866a-7d88-4bdb-8718-0b75e21ada5e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.027184] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2f53c0-f2c7-44c9-9201-6f165f0d0d4c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.036442] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed40a0f-bc7e-4cfa-aac3-b1ecd4a78827 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.050841] env[68569]: DEBUG nova.compute.provider_tree [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.124026] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 816.124026] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfdcd5d9-700e-44ec-9f2f-1a117f752dc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.131313] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 816.131313] env[68569]: value = "task-3167006" [ 816.131313] env[68569]: _type = "Task" [ 816.131313] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.139844] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167006, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.169808] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d46431b-f0a1-4f71-b64c-bd8054a7d4b3 tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.788s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.531560] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.592690] env[68569]: DEBUG nova.scheduler.client.report [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.592690] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.593226] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.593226] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.593226] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc 
tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.593226] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.593226] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.593505] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.593505] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.593505] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.593505] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.593505] env[68569]: DEBUG nova.virt.hardware [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.593834] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98bc671-965b-4297-8c0c-9229be999daf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.593834] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f456a6-477f-4f5e-aad2-36b2c1b789e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.617089] env[68569]: DEBUG nova.compute.manager [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Received event network-changed-9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11744}} [ 816.618338] env[68569]: DEBUG nova.compute.manager [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Refreshing instance network info cache due to event network-changed-9ecb2363-68e3-455a-8b8f-db5226a52abf. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 816.618555] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] Acquiring lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.618704] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] Acquired lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.618911] env[68569]: DEBUG nova.network.neutron [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Refreshing network info cache for port 9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.654665] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167006, 'name': PowerOffVM_Task, 'duration_secs': 0.107299} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.655594] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 816.655862] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 816.656687] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469eae8f-3e87-4854-8c20-71166c3f3e86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.674530] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 816.675198] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 816.680560] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0338114-b36a-435b-b107-d2c12593dc1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.709837] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 816.710120] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 816.713096] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Deleting the datastore file [datastore2] 50abc994-682a-40d6-ae77-601839b98793 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.713219] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-281ca28e-e86e-46f1-8c0b-fc2ddb99cde0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.724446] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 816.724446] env[68569]: value = "task-3167008" [ 816.724446] env[68569]: _type = "Task" [ 816.724446] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.741827] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167008, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.073519] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.076489] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 817.079207] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.726s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.081085] env[68569]: INFO nova.compute.claims [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.207988] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.235766] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.352868} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.236155] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 817.236408] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 817.236625] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.258704] env[68569]: DEBUG nova.compute.manager [req-37018a92-5a35-4fcb-9e85-5e86b149cce3 req-5087f174-89ac-4e01-be92-e0e8e322bf36 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Received event network-vif-plugged-3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 817.258704] env[68569]: DEBUG oslo_concurrency.lockutils [req-37018a92-5a35-4fcb-9e85-5e86b149cce3 req-5087f174-89ac-4e01-be92-e0e8e322bf36 service nova] Acquiring lock "cc5139e1-4601-4966-9224-25b8223b8a57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.258704] env[68569]: DEBUG oslo_concurrency.lockutils [req-37018a92-5a35-4fcb-9e85-5e86b149cce3 
req-5087f174-89ac-4e01-be92-e0e8e322bf36 service nova] Lock "cc5139e1-4601-4966-9224-25b8223b8a57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.258980] env[68569]: DEBUG oslo_concurrency.lockutils [req-37018a92-5a35-4fcb-9e85-5e86b149cce3 req-5087f174-89ac-4e01-be92-e0e8e322bf36 service nova] Lock "cc5139e1-4601-4966-9224-25b8223b8a57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.260482] env[68569]: DEBUG nova.compute.manager [req-37018a92-5a35-4fcb-9e85-5e86b149cce3 req-5087f174-89ac-4e01-be92-e0e8e322bf36 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] No waiting events found dispatching network-vif-plugged-3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 817.260482] env[68569]: WARNING nova.compute.manager [req-37018a92-5a35-4fcb-9e85-5e86b149cce3 req-5087f174-89ac-4e01-be92-e0e8e322bf36 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Received unexpected event network-vif-plugged-3250ac37-e76e-40a8-a2d7-a0ad04d568f3 for instance with vm_state building and task_state spawning. [ 817.325800] env[68569]: DEBUG nova.network.neutron [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Successfully updated port: 3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.441241] env[68569]: DEBUG nova.network.neutron [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updated VIF entry in instance network info cache for port 9ecb2363-68e3-455a-8b8f-db5226a52abf. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 817.441613] env[68569]: DEBUG nova.network.neutron [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updating instance_info_cache with network_info: [{"id": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "address": "fa:16:3e:5e:79:75", "network": {"id": "1ff7b043-4f7f-405a-b1d8-d45bcd02c339", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-521857199-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1de4680b5844def8ff462ea7ef6a25c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecb2363-68", "ovs_interfaceid": "9ecb2363-68e3-455a-8b8f-db5226a52abf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.588167] env[68569]: DEBUG nova.compute.utils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 817.589700] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.589870] env[68569]: DEBUG nova.network.neutron [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.625326] env[68569]: DEBUG nova.policy [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4c009f7a67243209e544bd160ccbd31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e8c967899024e00b5e50aa3e1d0f454', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.828837] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "refresh_cache-cc5139e1-4601-4966-9224-25b8223b8a57" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.829129] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired lock "refresh_cache-cc5139e1-4601-4966-9224-25b8223b8a57" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.829335] env[68569]: DEBUG nova.network.neutron [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.932394] env[68569]: DEBUG nova.network.neutron [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Successfully created port: 87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.944512] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfc6f4ae-efe1-451c-881a-13066d8a8644 req-d64ad183-3c16-4691-8b67-ed6dc8649947 service nova] Releasing lock "refresh_cache-53cc8dbd-c163-403a-9286-e1f8ad939f94" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.093279] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 818.289425] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.291388] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.291388] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.291388] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.291388] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.291388] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.291388] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.291581] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.291581] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 
tempest-ServersAdmin275Test-562968262-project-admin] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.291581] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.291581] env[68569]: DEBUG nova.virt.hardware [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.293288] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136d18c5-baf6-4aba-86f6-5efd41e13061 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.303775] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6744e0-edb3-4702-bfd1-0be2e5c83836 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.319235] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.325020] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.325330] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.325585] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f416394-d1bf-4d88-a1e6-7c6ed7675c44 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.348368] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.348368] env[68569]: value = "task-3167009" [ 818.348368] env[68569]: _type = "Task" [ 818.348368] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.356611] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167009, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.387667] env[68569]: DEBUG nova.network.neutron [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.527191] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d19c65c-1321-45cc-b429-f4567c8ca953 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.539241] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f357f828-762d-4835-a187-44bab9a341df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.570466] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a521f8a-ba00-40a4-afc9-766d118b1ddc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.578239] env[68569]: DEBUG nova.network.neutron [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Updating instance_info_cache with network_info: [{"id": "3250ac37-e76e-40a8-a2d7-a0ad04d568f3", "address": "fa:16:3e:a4:40:28", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3250ac37-e7", "ovs_interfaceid": "3250ac37-e76e-40a8-a2d7-a0ad04d568f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.580586] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248cd21b-9201-4063-a74f-b7cde1a4c046 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.596256] env[68569]: DEBUG nova.compute.provider_tree [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.857675] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167009, 'name': CreateVM_Task, 'duration_secs': 0.263916} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.857864] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.858305] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.858481] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.858762] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 818.858992] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-debe2f07-6dd1-4b75-b0b2-6d69f272702c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.863521] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 818.863521] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d14366-27f1-e5cb-0bb1-4e0f162617d2" [ 818.863521] env[68569]: _type = "Task" [ 818.863521] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.870646] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d14366-27f1-e5cb-0bb1-4e0f162617d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.087946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Releasing lock "refresh_cache-cc5139e1-4601-4966-9224-25b8223b8a57" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.088395] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Instance network_info: |[{"id": "3250ac37-e76e-40a8-a2d7-a0ad04d568f3", "address": "fa:16:3e:a4:40:28", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3250ac37-e7", "ovs_interfaceid": "3250ac37-e76e-40a8-a2d7-a0ad04d568f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 819.088847] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:40:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62f28d75-4e6a-4ae5-b8b3-d0652ea26d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3250ac37-e76e-40a8-a2d7-a0ad04d568f3', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.096739] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 819.096922] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.097168] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d4cd810-16a1-4c1b-bef0-9c3a8b85bc12 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.112694] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 819.115090] env[68569]: DEBUG nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.122752] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.122752] env[68569]: value = "task-3167010" [ 819.122752] env[68569]: _type = "Task" [ 819.122752] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.130122] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167010, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.140295] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 819.140519] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.140672] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 819.140851] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.140995] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 819.141156] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 819.141410] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 819.141577] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 819.141773] env[68569]: 
DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 819.141920] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 819.142104] env[68569]: DEBUG nova.virt.hardware [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 819.143126] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1e14fd-7577-4c28-a084-376b122b89a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.151250] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c32a28c-4f18-4548-9799-aa3f48bb9253 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.283713] env[68569]: DEBUG nova.compute.manager [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Received event network-changed-3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 819.283858] env[68569]: DEBUG nova.compute.manager [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Refreshing instance network info cache due to event network-changed-3250ac37-e76e-40a8-a2d7-a0ad04d568f3. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 819.284117] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] Acquiring lock "refresh_cache-cc5139e1-4601-4966-9224-25b8223b8a57" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.284335] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] Acquired lock "refresh_cache-cc5139e1-4601-4966-9224-25b8223b8a57" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.284414] env[68569]: DEBUG nova.network.neutron [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Refreshing network info cache for port 3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 819.299695] env[68569]: DEBUG nova.compute.manager [req-f3a94f74-61b9-4bcf-9d3b-1f1cdfd4fa82 req-fa0f1550-90c0-43a6-9ff6-916894d6bb32 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Received event network-vif-plugged-87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 819.299695] env[68569]: DEBUG oslo_concurrency.lockutils [req-f3a94f74-61b9-4bcf-9d3b-1f1cdfd4fa82 req-fa0f1550-90c0-43a6-9ff6-916894d6bb32 service nova] Acquiring lock "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.299695] env[68569]: DEBUG oslo_concurrency.lockutils [req-f3a94f74-61b9-4bcf-9d3b-1f1cdfd4fa82 req-fa0f1550-90c0-43a6-9ff6-916894d6bb32 service nova] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.299695] env[68569]: DEBUG oslo_concurrency.lockutils [req-f3a94f74-61b9-4bcf-9d3b-1f1cdfd4fa82 req-fa0f1550-90c0-43a6-9ff6-916894d6bb32 service nova] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.299695] env[68569]: DEBUG nova.compute.manager [req-f3a94f74-61b9-4bcf-9d3b-1f1cdfd4fa82 req-fa0f1550-90c0-43a6-9ff6-916894d6bb32 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] No waiting events found dispatching network-vif-plugged-87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 819.299948] env[68569]: WARNING nova.compute.manager [req-f3a94f74-61b9-4bcf-9d3b-1f1cdfd4fa82 req-fa0f1550-90c0-43a6-9ff6-916894d6bb32 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Received unexpected event network-vif-plugged-87002a64-15a4-4a08-879c-483a9bf211c0 for instance with vm_state building and task_state spawning.
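The lockutils records immediately above show the external-event handling path: for network-vif-plugged-87002a64-15a4-4a08-879c-483a9bf211c0, a per-instance "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" lock is acquired by pop_instance_event.<locals>._pop_event, held for ~0.000s, and released; because no waiter had registered for that event, the dispatch finds nothing and the "Received unexpected event" warning follows. Below is a minimal sketch of that locking pattern only, using the real oslo_concurrency.lockutils API; the _pending_events dict is a hypothetical stand-in for the manager's event registry, not Nova's actual implementation.

    # Minimal sketch (assumption: not Nova's real code) of the per-instance
    # "<uuid>-events" lock pattern seen in the records above.
    from oslo_concurrency import lockutils

    # Hypothetical registry of events someone is waiting for:
    # {instance_uuid: {event_name: waiter}}
    _pending_events = {}

    def pop_instance_event(instance_uuid, event_name):
        """Pop the waiter for event_name under the instance's event lock."""

        @lockutils.synchronized(f"{instance_uuid}-events")
        def _pop_event():
            # The synchronized decorator emits the "Acquiring/acquired/
            # released" DEBUG lines; a miss here is what leads the caller
            # to log the "Received unexpected event" warning.
            return _pending_events.get(instance_uuid, {}).pop(event_name, None)

        return _pop_event()

Calling pop_instance_event('9eafa273-097b-48ac-ae5f-4f7a469ac861', 'network-vif-plugged-87002a64-15a4-4a08-879c-483a9bf211c0') against an empty registry returns None, mirroring the "No waiting events found" outcome logged above.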
[ 819.374964] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d14366-27f1-e5cb-0bb1-4e0f162617d2, 'name': SearchDatastore_Task, 'duration_secs': 0.01161} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.375357] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.375658] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.375933] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.376168] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.376371] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.376642] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd424062-6fc6-48a3-aeb0-dfaf2053e067 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.385367] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.385544] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.386286] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75280a2b-915b-4545-a11f-c917c66083e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.391312] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 819.391312] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526d8b4f-5828-a27a-42e5-d116ad411815" [ 819.391312] env[68569]: _type = "Task" [ 819.391312] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.402356] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526d8b4f-5828-a27a-42e5-d116ad411815, 'name': SearchDatastore_Task, 'duration_secs': 0.00854} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.404546] env[68569]: DEBUG nova.network.neutron [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Successfully updated port: 87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.404921] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14ef76b7-324a-4a77-b1d2-f00bfce9d2a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.413653] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 819.413653] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5242896f-1548-9512-d8dc-10fa89bee2d6" [ 819.413653] env[68569]: _type = "Task" [ 819.413653] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.423648] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5242896f-1548-9512-d8dc-10fa89bee2d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.424407] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.424486] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 819.424716] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-598d0bd0-960b-4991-88e7-a58e5613293f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.431948] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 819.431948] env[68569]: value = "task-3167012" [ 819.431948] env[68569]: _type = "Task" [ 819.431948] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.441201] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167012, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.622039] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.622039] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.624511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.160s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.624753] env[68569]: DEBUG nova.objects.instance [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lazy-loading 'resources' on Instance uuid b2e6de60-b4e5-4030-bca7-355d17fec06d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.638782] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167010, 'name': CreateVM_Task, 'duration_secs': 0.30059} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.639814] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.640940] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.641147] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.641577] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 819.642234] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70a8622e-1b27-49c9-bfbb-cefd5f2782d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.651604] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 819.651604] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a948eb-e8e2-395e-9304-10082354d60a" [ 819.651604] env[68569]: _type = "Task" [ 819.651604] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.664600] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a948eb-e8e2-395e-9304-10082354d60a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.910029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "refresh_cache-9eafa273-097b-48ac-ae5f-4f7a469ac861" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.910029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquired lock "refresh_cache-9eafa273-097b-48ac-ae5f-4f7a469ac861" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.910029] env[68569]: DEBUG nova.network.neutron [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.945773] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167012, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501721} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.946107] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 819.946258] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 819.946510] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9604f40f-7299-4dc9-b002-a04f85d83f43 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.953030] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 819.953030] env[68569]: value = "task-3167013" [ 819.953030] env[68569]: _type = "Task" [ 819.953030] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.961324] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167013, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.129025] env[68569]: DEBUG nova.compute.utils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 820.132379] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 820.133398] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.169557] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a948eb-e8e2-395e-9304-10082354d60a, 'name': SearchDatastore_Task, 'duration_secs': 0.062178} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.169883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.170138] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.170385] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.170529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.170706] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.170963] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd3207e4-7b1e-425c-8491-c184e45f1fe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.181263] env[68569]: DEBUG nova.policy [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4a384e5a314a83b75928dea39bf78c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10edb645f90c45edbd3aa43bfa24b86e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 820.185671] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.185847] env[68569]: DEBUG 
nova.virt.vmwareapi.vmops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.188757] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d473659-5477-4625-a852-dace993785b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.195716] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 820.195716] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5226d7cf-275f-3a8f-ef53-5c34c73b8a1d" [ 820.195716] env[68569]: _type = "Task" [ 820.195716] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.203321] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5226d7cf-275f-3a8f-ef53-5c34c73b8a1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.283860] env[68569]: DEBUG nova.network.neutron [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Updated VIF entry in instance network info cache for port 3250ac37-e76e-40a8-a2d7-a0ad04d568f3. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 820.284294] env[68569]: DEBUG nova.network.neutron [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Updating instance_info_cache with network_info: [{"id": "3250ac37-e76e-40a8-a2d7-a0ad04d568f3", "address": "fa:16:3e:a4:40:28", "network": {"id": "d6edc444-be83-4191-989a-6abe41074736", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-628698740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d628075adbb43b8a572072277c25741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62f28d75-4e6a-4ae5-b8b3-d0652ea26d08", "external-id": "nsx-vlan-transportzone-801", "segmentation_id": 801, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3250ac37-e7", "ovs_interfaceid": "3250ac37-e76e-40a8-a2d7-a0ad04d568f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.443484] env[68569]: DEBUG nova.network.neutron [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.464896] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104842} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.465177] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 820.466607] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acac7f95-ea36-4c43-8152-daffbe2fc049 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.488396] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.489077] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Successfully created port: 0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.492883] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57fe7625-a27f-49f9-9d1a-cfd566ef1e01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.518015] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 820.518015] env[68569]: value = "task-3167014" [ 820.518015] env[68569]: _type = "Task" [ 820.518015] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.529434] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167014, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.556222] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6448a01a-79a0-4557-bad1-ec7718dffefa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.564374] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e15fcbd-8601-4139-99a4-346c8aeab8a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.594785] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e128de-1913-4cb1-862b-2db9b143434e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.603021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181b6512-563a-4ba4-bcc3-a53489321904 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.616718] env[68569]: DEBUG nova.compute.provider_tree [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.634290] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.662409] env[68569]: DEBUG nova.network.neutron [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Updating instance_info_cache with network_info: [{"id": "87002a64-15a4-4a08-879c-483a9bf211c0", "address": "fa:16:3e:39:6e:7b", "network": {"id": "9233e679-9bbd-428f-ab68-c6d6a6fe6cd1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1790008523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e8c967899024e00b5e50aa3e1d0f454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87002a64-15", "ovs_interfaceid": "87002a64-15a4-4a08-879c-483a9bf211c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.709426] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5226d7cf-275f-3a8f-ef53-5c34c73b8a1d, 'name': SearchDatastore_Task, 'duration_secs': 0.011039} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.710382] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd41e710-0f20-4f02-9b85-a8c0ad07624e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.717765] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 820.717765] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fe11-07d4-84fc-c2b3-1d3228222e21" [ 820.717765] env[68569]: _type = "Task" [ 820.717765] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.726945] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fe11-07d4-84fc-c2b3-1d3228222e21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.789332] env[68569]: DEBUG oslo_concurrency.lockutils [req-0c614667-2adb-4d30-9cc0-dd940dec481e req-6b93cc82-588b-4053-b25d-1f1081a09e14 service nova] Releasing lock "refresh_cache-cc5139e1-4601-4966-9224-25b8223b8a57" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.031407] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167014, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.119851] env[68569]: DEBUG nova.scheduler.client.report [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.164168] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Releasing lock "refresh_cache-9eafa273-097b-48ac-ae5f-4f7a469ac861" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.164519] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Instance network_info: |[{"id": "87002a64-15a4-4a08-879c-483a9bf211c0", "address": "fa:16:3e:39:6e:7b", "network": {"id": "9233e679-9bbd-428f-ab68-c6d6a6fe6cd1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1790008523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e8c967899024e00b5e50aa3e1d0f454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87002a64-15", "ovs_interfaceid": "87002a64-15a4-4a08-879c-483a9bf211c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.165151] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:6e:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87002a64-15a4-4a08-879c-483a9bf211c0', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.172268] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Creating folder: Project (3e8c967899024e00b5e50aa3e1d0f454). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.173158] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be5bee74-327c-4b4c-8105-5e8b1745f3e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.185600] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Created folder: Project (3e8c967899024e00b5e50aa3e1d0f454) in parent group-v633430. [ 821.185815] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Creating folder: Instances. Parent ref: group-v633590. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.186081] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f5c6274-8d0a-4624-8c40-73e3dac92aa1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.196284] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Created folder: Instances in parent group-v633590. [ 821.196525] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.196714] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.197916] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf6bce18-182b-4f43-9d10-dea10fa57b10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.217744] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.217744] env[68569]: value = "task-3167017" [ 821.217744] env[68569]: _type = "Task" [ 821.217744] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.228712] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167017, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.232359] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fe11-07d4-84fc-c2b3-1d3228222e21, 'name': SearchDatastore_Task, 'duration_secs': 0.03764} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.232499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.232764] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] cc5139e1-4601-4966-9224-25b8223b8a57/cc5139e1-4601-4966-9224-25b8223b8a57.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.233060] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b97958d-21cd-444f-9b79-67a509b6605e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.239998] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 821.239998] env[68569]: value = "task-3167018" [ 821.239998] env[68569]: _type = "Task" [ 821.239998] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.247969] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167018, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.327690] env[68569]: DEBUG nova.compute.manager [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Received event network-changed-87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 821.327886] env[68569]: DEBUG nova.compute.manager [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Refreshing instance network info cache due to event network-changed-87002a64-15a4-4a08-879c-483a9bf211c0. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 821.328090] env[68569]: DEBUG oslo_concurrency.lockutils [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] Acquiring lock "refresh_cache-9eafa273-097b-48ac-ae5f-4f7a469ac861" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.328307] env[68569]: DEBUG oslo_concurrency.lockutils [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] Acquired lock "refresh_cache-9eafa273-097b-48ac-ae5f-4f7a469ac861" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.328490] env[68569]: DEBUG nova.network.neutron [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Refreshing network info cache for port 87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.532775] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167014, 'name': ReconfigVM_Task, 'duration_secs': 0.570361} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.533783] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 50abc994-682a-40d6-ae77-601839b98793/50abc994-682a-40d6-ae77-601839b98793.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.534465] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-231710cd-1b1d-4171-9fbc-d0761c7d7249 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.545226] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 821.545226] env[68569]: value = "task-3167020" [ 821.545226] env[68569]: _type = "Task" [ 821.545226] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.554904] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167020, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.625515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.628489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.545s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.630317] env[68569]: INFO nova.compute.claims [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.642080] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.657691] env[68569]: INFO nova.scheduler.client.report [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Deleted allocations for instance b2e6de60-b4e5-4030-bca7-355d17fec06d [ 821.676681] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.677416] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.677416] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image limits 0:0:0 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.677416] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.677596] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.677760] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.678044] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.678213] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.678429] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.678619] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.678897] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.679949] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5314fe85-8929-4331-926c-364bae3817b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.693345] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e219be-364b-45c5-bf13-5dd87487148e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.729653] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167017, 'name': CreateVM_Task, 'duration_secs': 0.349551} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.730472] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.730789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.730997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.731344] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.731611] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07a6f7dd-d5bf-456e-a05a-d35a249ef925 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.738529] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 821.738529] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525a8e28-0d95-2b30-3aca-8d71894bda64" [ 821.738529] env[68569]: _type = "Task" [ 821.738529] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.751684] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525a8e28-0d95-2b30-3aca-8d71894bda64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.755568] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167018, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.009201] env[68569]: DEBUG nova.compute.manager [req-a3f2da12-6751-42ad-a648-081c9bd9bad7 req-08371044-3a14-485d-abb9-208b5c4961ad service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Received event network-vif-plugged-0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 822.009201] env[68569]: DEBUG oslo_concurrency.lockutils [req-a3f2da12-6751-42ad-a648-081c9bd9bad7 req-08371044-3a14-485d-abb9-208b5c4961ad service nova] Acquiring lock "57a63648-83e9-4f23-aebc-050e58149ce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.009201] env[68569]: DEBUG oslo_concurrency.lockutils [req-a3f2da12-6751-42ad-a648-081c9bd9bad7 req-08371044-3a14-485d-abb9-208b5c4961ad service nova] Lock "57a63648-83e9-4f23-aebc-050e58149ce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.009201] env[68569]: DEBUG oslo_concurrency.lockutils [req-a3f2da12-6751-42ad-a648-081c9bd9bad7 req-08371044-3a14-485d-abb9-208b5c4961ad service nova] Lock "57a63648-83e9-4f23-aebc-050e58149ce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.009201] env[68569]: DEBUG nova.compute.manager [req-a3f2da12-6751-42ad-a648-081c9bd9bad7 req-08371044-3a14-485d-abb9-208b5c4961ad service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] No waiting events found dispatching network-vif-plugged-0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.009429] env[68569]: WARNING nova.compute.manager [req-a3f2da12-6751-42ad-a648-081c9bd9bad7 req-08371044-3a14-485d-abb9-208b5c4961ad service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Received unexpected event network-vif-plugged-0769c926-1f70-44d4-8559-8b1f4e5aa14a for instance with vm_state building and task_state spawning. [ 822.056512] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167020, 'name': Rename_Task, 'duration_secs': 0.177555} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.057065] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.057620] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6124f309-fc9e-4f07-a16e-e9c470a88817 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.069065] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Waiting for the task: (returnval){ [ 822.069065] env[68569]: value = "task-3167021" [ 822.069065] env[68569]: _type = "Task" [ 822.069065] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.076673] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167021, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.079765] env[68569]: DEBUG nova.network.neutron [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Updated VIF entry in instance network info cache for port 87002a64-15a4-4a08-879c-483a9bf211c0. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.080226] env[68569]: DEBUG nova.network.neutron [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Updating instance_info_cache with network_info: [{"id": "87002a64-15a4-4a08-879c-483a9bf211c0", "address": "fa:16:3e:39:6e:7b", "network": {"id": "9233e679-9bbd-428f-ab68-c6d6a6fe6cd1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1790008523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e8c967899024e00b5e50aa3e1d0f454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87002a64-15", "ovs_interfaceid": "87002a64-15a4-4a08-879c-483a9bf211c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.150790] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Successfully updated port: 0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.169455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0cedb1ef-aac3-48ef-b3d4-face8eddb6d3 tempest-ServersTestManualDisk-523803947 tempest-ServersTestManualDisk-523803947-project-member] Lock "b2e6de60-b4e5-4030-bca7-355d17fec06d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.438s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.254732] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525a8e28-0d95-2b30-3aca-8d71894bda64, 'name': SearchDatastore_Task, 'duration_secs': 0.058928} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.259043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.259043] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.259043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.259043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.259228] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.259458] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650672} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.259667] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc70099b-91a9-4113-850a-2aaccb3e37d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.261685] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] cc5139e1-4601-4966-9224-25b8223b8a57/cc5139e1-4601-4966-9224-25b8223b8a57.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.261885] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.262146] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f7998ce-a81b-4a64-bfd0-6d62a204db6d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.270552] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 822.270552] env[68569]: value = "task-3167022" [ 822.270552] env[68569]: _type = "Task" [ 822.270552] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.275050] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.275333] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.276441] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24ce5bfb-c721-4076-b90a-1f7fbcc438dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.282150] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167022, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.286445] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 822.286445] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239d709-8d05-97dd-d24d-f4f86ee5b9a2" [ 822.286445] env[68569]: _type = "Task" [ 822.286445] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.297903] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239d709-8d05-97dd-d24d-f4f86ee5b9a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.577390] env[68569]: DEBUG oslo_vmware.api [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Task: {'id': task-3167021, 'name': PowerOnVM_Task, 'duration_secs': 0.454165} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.577641] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 822.577918] env[68569]: DEBUG nova.compute.manager [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 822.578612] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ac5e03-db76-4501-ae46-c2cf6130345d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.582708] env[68569]: DEBUG oslo_concurrency.lockutils [req-68f76755-21f6-465d-bb60-0046e2f0eded req-55cacb3d-9d79-4aac-b661-d3f3bbc53896 service nova] Releasing lock "refresh_cache-9eafa273-097b-48ac-ae5f-4f7a469ac861" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.656204] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "refresh_cache-57a63648-83e9-4f23-aebc-050e58149ce2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.656452] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "refresh_cache-57a63648-83e9-4f23-aebc-050e58149ce2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.656617] env[68569]: DEBUG 
nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.783359] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068912} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.783615] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.784411] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfba091-cdd7-4d09-bfae-eb2b6f07f120 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.811111] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239d709-8d05-97dd-d24d-f4f86ee5b9a2, 'name': SearchDatastore_Task, 'duration_secs': 0.009997} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.819734] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] cc5139e1-4601-4966-9224-25b8223b8a57/cc5139e1-4601-4966-9224-25b8223b8a57.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.823021] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b61a50f6-49fd-4c48-bf7c-544b20c58fa3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.837410] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13361063-f503-4182-b453-3c8ab4b79607 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.846447] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 822.846447] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113a33-99ec-5a9a-7a25-dedb8abeeeec" [ 822.846447] env[68569]: _type = "Task" [ 822.846447] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.850019] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 822.850019] env[68569]: value = "task-3167023" [ 822.850019] env[68569]: _type = "Task" [ 822.850019] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.863081] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113a33-99ec-5a9a-7a25-dedb8abeeeec, 'name': SearchDatastore_Task, 'duration_secs': 0.010979} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.866146] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.866423] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 9eafa273-097b-48ac-ae5f-4f7a469ac861/9eafa273-097b-48ac-ae5f-4f7a469ac861.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.866688] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167023, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.866893] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-420bf9d9-0aa1-41f1-999a-fb4e17c178d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.874122] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 822.874122] env[68569]: value = "task-3167024" [ 822.874122] env[68569]: _type = "Task" [ 822.874122] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.883102] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167024, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.085691] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339dc4ee-cfd4-4e82-9ddc-f73e81048675 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.103287] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741dc417-663d-4bc5-9c63-90f944220dbe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.108827] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.140419] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc0dc4c-a93c-4232-ad6d-2642bf60e54d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.151227] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68af4f8a-9190-468f-9ccf-613d687e69c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.168928] env[68569]: DEBUG nova.compute.provider_tree [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.200614] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.366806] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.385686] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498267} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.386075] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 9eafa273-097b-48ac-ae5f-4f7a469ac861/9eafa273-097b-48ac-ae5f-4f7a469ac861.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.386693] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.387092] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e0e942c-cb2f-41bf-b75e-6f4035591259 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.395507] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 823.395507] env[68569]: value = "task-3167025" [ 823.395507] env[68569]: _type = "Task" [ 823.395507] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.408580] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167025, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.411699] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Updating instance_info_cache with network_info: [{"id": "0769c926-1f70-44d4-8559-8b1f4e5aa14a", "address": "fa:16:3e:23:83:69", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0769c926-1f", "ovs_interfaceid": "0769c926-1f70-44d4-8559-8b1f4e5aa14a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.704473] env[68569]: ERROR nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [req-b8706cd3-178c-4796-b53b-92d27316f322] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b8706cd3-178c-4796-b53b-92d27316f322"}]} [ 823.727117] env[68569]: DEBUG nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 823.745696] env[68569]: DEBUG nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 823.745937] env[68569]: DEBUG nova.compute.provider_tree [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.762614] env[68569]: DEBUG nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 823.784818] env[68569]: DEBUG nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 823.867065] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167023, 'name': ReconfigVM_Task, 'duration_secs': 0.598676} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.867610] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Reconfigured VM instance instance-00000035 to attach disk [datastore1] cc5139e1-4601-4966-9224-25b8223b8a57/cc5139e1-4601-4966-9224-25b8223b8a57.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.868321] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11c9876f-9389-4a27-a7de-0ac0e04ec910 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.876722] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 823.876722] env[68569]: value = "task-3167027" [ 823.876722] env[68569]: _type = "Task" [ 823.876722] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.886742] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167027, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.910682] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167025, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.916980] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "refresh_cache-57a63648-83e9-4f23-aebc-050e58149ce2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.916980] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Instance network_info: |[{"id": "0769c926-1f70-44d4-8559-8b1f4e5aa14a", "address": "fa:16:3e:23:83:69", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0769c926-1f", "ovs_interfaceid": "0769c926-1f70-44d4-8559-8b1f4e5aa14a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.917372] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:83:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0769c926-1f70-44d4-8559-8b1f4e5aa14a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.928357] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.931770] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.932316] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0664a11e-87fd-4af1-a0cc-9c4addffee7f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.955766] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.955766] env[68569]: value = "task-3167028" [ 823.955766] env[68569]: _type = "Task" [ 823.955766] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.968590] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167028, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.107997] env[68569]: DEBUG nova.compute.manager [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Received event network-changed-0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 824.108329] env[68569]: DEBUG nova.compute.manager [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Refreshing instance network info cache due to event network-changed-0769c926-1f70-44d4-8559-8b1f4e5aa14a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 824.108636] env[68569]: DEBUG oslo_concurrency.lockutils [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] Acquiring lock "refresh_cache-57a63648-83e9-4f23-aebc-050e58149ce2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.108887] env[68569]: DEBUG oslo_concurrency.lockutils [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] Acquired lock "refresh_cache-57a63648-83e9-4f23-aebc-050e58149ce2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.109141] env[68569]: DEBUG nova.network.neutron [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Refreshing network info cache for port 0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.268036] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc77dad-eb86-4f5d-81ac-e67990aa8940 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.277492] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a41ed4-ade3-4a01-96b4-95d2c1aa1fb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.315074] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32251d4-41ca-4f33-bead-590161541a41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.324271] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771561c4-b6c1-40dd-8c79-5212c8b07268 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.342539] env[68569]: DEBUG nova.compute.provider_tree [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 824.344364] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "50abc994-682a-40d6-ae77-601839b98793" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.344698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 
tempest-ServersAdmin275Test-1333460505-project-member] Lock "50abc994-682a-40d6-ae77-601839b98793" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.344815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "50abc994-682a-40d6-ae77-601839b98793-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.344958] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "50abc994-682a-40d6-ae77-601839b98793-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.345141] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "50abc994-682a-40d6-ae77-601839b98793-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.348845] env[68569]: INFO nova.compute.manager [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Terminating instance [ 824.388705] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167027, 'name': Rename_Task, 'duration_secs': 0.441043} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.388705] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.388995] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b3b64c8-0d8a-47b6-8b1d-a25aebf241ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.402134] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 824.402134] env[68569]: value = "task-3167029" [ 824.402134] env[68569]: _type = "Task" [ 824.402134] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.416055] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.418630] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.603227} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.418984] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.421082] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d5a637-a189-4102-a712-fb68fe767551 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.448275] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 9eafa273-097b-48ac-ae5f-4f7a469ac861/9eafa273-097b-48ac-ae5f-4f7a469ac861.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.448519] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40d7f956-459d-4a4c-9677-c003ededf97c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.476939] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167028, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.478392] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 824.478392] env[68569]: value = "task-3167030" [ 824.478392] env[68569]: _type = "Task" [ 824.478392] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.487881] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167030, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.839131] env[68569]: DEBUG nova.network.neutron [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Updated VIF entry in instance network info cache for port 0769c926-1f70-44d4-8559-8b1f4e5aa14a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.839493] env[68569]: DEBUG nova.network.neutron [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Updating instance_info_cache with network_info: [{"id": "0769c926-1f70-44d4-8559-8b1f4e5aa14a", "address": "fa:16:3e:23:83:69", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0769c926-1f", "ovs_interfaceid": "0769c926-1f70-44d4-8559-8b1f4e5aa14a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.852439] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "refresh_cache-50abc994-682a-40d6-ae77-601839b98793" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.852645] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquired lock "refresh_cache-50abc994-682a-40d6-ae77-601839b98793" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.852817] env[68569]: DEBUG nova.network.neutron [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.881507] env[68569]: DEBUG nova.scheduler.client.report [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 824.881628] env[68569]: DEBUG nova.compute.provider_tree [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 87 to 88 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 824.881792] env[68569]: DEBUG nova.compute.provider_tree [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 824.913528] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167029, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.977378] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167028, 'name': CreateVM_Task, 'duration_secs': 0.523132} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.977460] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.978174] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.978345] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.978719] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 824.978976] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f464cadb-f79b-482c-ae81-d90d552c552a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.988599] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 824.988599] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cb2a0c-4ea2-2678-1566-ce83611d93f2" [ 824.988599] env[68569]: _type = "Task" [ 824.988599] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.991353] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167030, 'name': ReconfigVM_Task, 'duration_secs': 0.394893} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.994494] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 9eafa273-097b-48ac-ae5f-4f7a469ac861/9eafa273-097b-48ac-ae5f-4f7a469ac861.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.995229] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3a3bbf9-5e5c-4491-956a-c653fee8bed8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.002918] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cb2a0c-4ea2-2678-1566-ce83611d93f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010864} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.004256] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.004508] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.004785] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.004950] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.005199] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.005533] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 
tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 825.005533] env[68569]: value = "task-3167031" [ 825.005533] env[68569]: _type = "Task" [ 825.005533] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.005714] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41f9f0cf-2078-4902-92a4-1f2bc497b3f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.016023] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167031, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.017834] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.018087] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.018698] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5dc194b-ebae-473d-887b-fd53f26453f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.024494] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 825.024494] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aeb64d-b565-0321-0523-b6a438faf142" [ 825.024494] env[68569]: _type = "Task" [ 825.024494] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.033703] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aeb64d-b565-0321-0523-b6a438faf142, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.343506] env[68569]: DEBUG oslo_concurrency.lockutils [req-9bf9852f-0d29-4807-a71a-b216217d2fd3 req-e9291db9-cea3-481c-8d1e-57043057f926 service nova] Releasing lock "refresh_cache-57a63648-83e9-4f23-aebc-050e58149ce2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.387149] env[68569]: DEBUG nova.network.neutron [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.389505] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.761s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.390010] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 825.392520] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.766s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.394947] env[68569]: INFO nova.compute.claims [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.421287] env[68569]: DEBUG oslo_vmware.api [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167029, 'name': PowerOnVM_Task, 'duration_secs': 0.535773} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.421287] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.421287] env[68569]: INFO nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Took 8.89 seconds to spawn the instance on the hypervisor. 
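The "Acquiring lock" / "acquired :: waited" / ""released" :: held" triplets above, such as the compute_resources lock held for 3.761s and then re-acquired by the next build after a 47.766s wait, are emitted by oslo.concurrency's lock helpers whenever DEBUG logging is enabled. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the lock name and function below are illustrative and not taken from Nova's source:

import logging

from oslo_concurrency import lockutils

# DEBUG logging is what makes the "Acquiring lock ..." / ":: waited" / ":: held"
# messages from lockutils visible, as in the log above.
logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Runs while the named lock is held; the decorator's wrapper logs how long the
    # caller waited for the lock and how long the lock was held afterwards.
    pass

claim_resources()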
[ 825.421287] env[68569]: DEBUG nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.422329] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5299af89-d84b-4e5e-be3a-50e00c4bd511 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.480819] env[68569]: DEBUG nova.network.neutron [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.519212] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167031, 'name': Rename_Task, 'duration_secs': 0.189267} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.519398] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.519781] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4642ffee-8893-443d-b2ea-28300ce77a7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.529686] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 825.529686] env[68569]: value = "task-3167032" [ 825.529686] env[68569]: _type = "Task" [ 825.529686] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.536852] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aeb64d-b565-0321-0523-b6a438faf142, 'name': SearchDatastore_Task, 'duration_secs': 0.010064} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.537979] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82f623ae-fbab-40ed-acea-c03b9eb132dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.543258] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167032, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.546872] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 825.546872] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a20ed9-8c98-093e-bf33-2dffb97a3931" [ 825.546872] env[68569]: _type = "Task" [ 825.546872] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.555554] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a20ed9-8c98-093e-bf33-2dffb97a3931, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.898910] env[68569]: DEBUG nova.compute.utils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 825.900388] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 825.900619] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.942874] env[68569]: DEBUG nova.policy [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4a384e5a314a83b75928dea39bf78c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10edb645f90c45edbd3aa43bfa24b86e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 825.948412] env[68569]: INFO nova.compute.manager [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Took 61.23 seconds to build instance. 
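Earlier in this window (823.704) the inventory PUT for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 failed with 409 placement.concurrent_update, and the report client recovered by re-reading the provider's inventories and generation and re-submitting, which bumped the generation from 87 to 88 (824.881). The generation is placement's optimistic-concurrency token: every write must carry the generation the writer last saw, so a concurrent update surfaces as a 409 instead of a silent overwrite. A simplified sketch of that refresh-and-retry pattern against the placement API, using plain requests rather than Nova's SchedulerReportClient; the endpoint, token, and microversion below are placeholders:

import requests

PLACEMENT = 'http://placement.example.test/placement'   # placeholder endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',                      # placeholder token
           'OpenStack-API-Version': 'placement 1.26'}
RP = 'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6'

def set_inventories(inventories, retries=3):
    # PUT with the last known generation; on 409 placement.concurrent_update,
    # re-GET to pick up the current generation (and any changed inventory) and retry.
    url = '%s/resource_providers/%s/inventories' % (PLACEMENT, RP)
    gen = requests.get(url, headers=HEADERS).json()['resource_provider_generation']
    for _ in range(retries):
        body = {'resource_provider_generation': gen, 'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            return resp
        gen = requests.get(url, headers=HEADERS).json()['resource_provider_generation']
    raise RuntimeError('inventory update kept hitting generation conflicts')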
[ 825.984657] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Releasing lock "refresh_cache-50abc994-682a-40d6-ae77-601839b98793" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.984824] env[68569]: DEBUG nova.compute.manager [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 825.985067] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 825.986644] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0012ed65-b78f-4dc3-9086-79813aaeb798 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.995537] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 825.995798] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fade873-87d2-4b02-bad2-3d9bc8d775b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.004052] env[68569]: DEBUG oslo_vmware.api [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 826.004052] env[68569]: value = "task-3167033" [ 826.004052] env[68569]: _type = "Task" [ 826.004052] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.014937] env[68569]: DEBUG oslo_vmware.api [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167033, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.047788] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167032, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.057335] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a20ed9-8c98-093e-bf33-2dffb97a3931, 'name': SearchDatastore_Task, 'duration_secs': 0.011505} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.057621] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.057965] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 57a63648-83e9-4f23-aebc-050e58149ce2/57a63648-83e9-4f23-aebc-050e58149ce2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.058244] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7835339d-e660-4de1-bc5e-184742606c8a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.068021] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 826.068021] env[68569]: value = "task-3167034" [ 826.068021] env[68569]: _type = "Task" [ 826.068021] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.078867] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.360135] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Successfully created port: 72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.404362] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 826.453982] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23f4bb44-e4df-4149-88bf-6ad8f9a7aadc tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "cc5139e1-4601-4966-9224-25b8223b8a57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.464s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.525326] env[68569]: DEBUG oslo_vmware.api [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167033, 'name': PowerOffVM_Task, 'duration_secs': 0.14481} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.525326] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 826.525326] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 826.525326] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-171d503d-7363-48e6-9dab-bbd36c644a1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.546924] env[68569]: DEBUG oslo_vmware.api [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167032, 'name': PowerOnVM_Task, 'duration_secs': 0.841124} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.547496] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.547841] env[68569]: INFO nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Took 7.43 seconds to spawn the instance on the hypervisor. 
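The recurring "Waiting for the task ... to complete", "progress is N%" and "completed successfully" entries (PowerOnVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task and so on) are oslo.vmware's polling around asynchronous vCenter operations. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and the choice of PowerOnVM_Task are placeholders, and the positional argument order of VMwareAPISession (host, username, password, api_retry_count, task_poll_interval) is stated from memory and may differ between oslo.vmware releases:

from oslo_vmware import api, vim_util

# Placeholder connection details; a real vCenter endpoint is needed to run this.
session = api.VMwareAPISession('vc.example.test', 'user', 'password', 10, 0.5)

# The "Invoking PropertyCollector.RetrievePropertiesEx" entries above correspond to
# property-collector reads like this one (up to 100 VirtualMachine objects with names).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100, ['name'])
vm_ref = result.objects[0].obj  # managed object reference of the first VM found

# Long-running operations return a Task; wait_for_task polls it (the "progress is N%"
# lines above) until SUCCESS and raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)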
[ 826.548159] env[68569]: DEBUG nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.551839] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad253a6a-fe78-46f7-a648-9c6f0c733b61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.564666] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 826.565210] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 826.565538] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Deleting the datastore file [datastore1] 50abc994-682a-40d6-ae77-601839b98793 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 826.566433] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad25fb01-da38-4db5-9036-ec9671a9cfd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.580960] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167034, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.586164] env[68569]: DEBUG oslo_vmware.api [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for the task: (returnval){ [ 826.586164] env[68569]: value = "task-3167036" [ 826.586164] env[68569]: _type = "Task" [ 826.586164] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.941799] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831601ec-e86f-4811-83bd-c85cca909a55 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.950047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad4c232-b18f-47e3-8f8f-7fca140980fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.958139] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.990381] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189cb3c1-5376-47d4-ab6f-ba87a2b976b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.999723] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bd0eb1-cd84-437c-ad93-725af8c1d96e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.015493] env[68569]: DEBUG nova.compute.provider_tree [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.077664] env[68569]: INFO nova.compute.manager [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Took 59.35 seconds to build instance. [ 827.082979] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167034, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.096917] env[68569]: DEBUG oslo_vmware.api [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Task: {'id': task-3167036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115696} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.098697] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.098903] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 827.099069] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.099238] env[68569]: INFO nova.compute.manager [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Took 1.11 seconds to destroy the instance on the hypervisor. [ 827.099561] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 827.099643] env[68569]: DEBUG nova.compute.manager [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 827.099733] env[68569]: DEBUG nova.network.neutron [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.120810] env[68569]: DEBUG nova.network.neutron [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.415264] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 827.443753] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 827.443996] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.444170] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 827.444347] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.444482] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 827.444621] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 827.444822] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 827.444969] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 827.445725] env[68569]: DEBUG nova.virt.hardware [None 
req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 827.445946] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 827.446299] env[68569]: DEBUG nova.virt.hardware [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 827.447386] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28e0602-1dfa-4ab7-ae84-a604fce8ff0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.458842] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04efcfd-dbe4-4c14-ac16-daace43e1c02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.480701] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.519296] env[68569]: DEBUG nova.scheduler.client.report [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 827.584463] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7acd5bc0-92f2-45da-91c5-bbdb13b8f6ef tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.449s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.584815] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167034, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.623405] env[68569]: DEBUG nova.network.neutron [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.020981] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "a591b671-ca84-47b5-9831-63478d55fd07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.021340] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "a591b671-ca84-47b5-9831-63478d55fd07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.024577] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.025161] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 828.028581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.612s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.032211] env[68569]: DEBUG nova.objects.instance [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lazy-loading 'resources' on Instance uuid 64146253-16ab-4d95-83c9-31b74014a040 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.084029] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167034, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.539584} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.084317] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 57a63648-83e9-4f23-aebc-050e58149ce2/57a63648-83e9-4f23-aebc-050e58149ce2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.084538] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.084800] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5a067af-1681-4807-b06b-be2e0c633e64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.091024] env[68569]: DEBUG nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 828.098090] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 828.098090] env[68569]: value = "task-3167037" [ 828.098090] env[68569]: _type = "Task" [ 828.098090] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.111498] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167037, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.124435] env[68569]: DEBUG nova.compute.manager [req-0a7a3b13-e2b8-4811-b29a-a618b71e4b7d req-2103fc45-c7e6-48cb-8156-a7eee8d23a56 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Received event network-vif-plugged-72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 828.124642] env[68569]: DEBUG oslo_concurrency.lockutils [req-0a7a3b13-e2b8-4811-b29a-a618b71e4b7d req-2103fc45-c7e6-48cb-8156-a7eee8d23a56 service nova] Acquiring lock "60aa85f3-edac-40e0-ad31-a8f93219e380-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.124841] env[68569]: DEBUG oslo_concurrency.lockutils [req-0a7a3b13-e2b8-4811-b29a-a618b71e4b7d req-2103fc45-c7e6-48cb-8156-a7eee8d23a56 service nova] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.125033] env[68569]: DEBUG oslo_concurrency.lockutils [req-0a7a3b13-e2b8-4811-b29a-a618b71e4b7d req-2103fc45-c7e6-48cb-8156-a7eee8d23a56 service nova] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.125179] env[68569]: DEBUG nova.compute.manager [req-0a7a3b13-e2b8-4811-b29a-a618b71e4b7d req-2103fc45-c7e6-48cb-8156-a7eee8d23a56 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] No waiting events found dispatching network-vif-plugged-72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 828.125370] env[68569]: WARNING nova.compute.manager [req-0a7a3b13-e2b8-4811-b29a-a618b71e4b7d req-2103fc45-c7e6-48cb-8156-a7eee8d23a56 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Received unexpected event network-vif-plugged-72163272-5b38-43c3-8978-a346db826a81 for instance with vm_state building and task_state spawning. [ 828.126101] env[68569]: INFO nova.compute.manager [-] [instance: 50abc994-682a-40d6-ae77-601839b98793] Took 1.03 seconds to deallocate network for instance. 
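The surrounding records show oslo.vmware's task loop: each VirtualDiskManager/VirtualMachine invocation returns a task-XXXXXXX reference, and wait_for_task/_poll_task keeps re-reading it until the "progress is N%" lines give way to "completed successfully". Below is a minimal conceptual sketch of that loop, for orientation only; it is not the oslo.vmware implementation, and get_task_info is a hypothetical stand-in for the PropertyCollector read performed on each poll.

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll task_ref until the backing vSphere task finishes (sketch)."""
        while True:
            info = get_task_info(task_ref)       # one vSphere round-trip per poll
            if info.state == 'success':
                return info.result               # caller then logs 'completed successfully'
            if info.state == 'error':
                raise RuntimeError(info.error)   # surfaces as a failed task in nova-compute
            time.sleep(poll_interval)            # otherwise log 'progress is N%' and retry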
[ 828.148177] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "9eafa273-097b-48ac-ae5f-4f7a469ac861" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.148428] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.148838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.148838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.148951] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.151514] env[68569]: INFO nova.compute.manager [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Terminating instance [ 828.239827] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Successfully updated port: 72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.538015] env[68569]: DEBUG nova.compute.utils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 828.541863] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Allocating IP information in 
the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 828.541863] env[68569]: DEBUG nova.network.neutron [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 828.599413] env[68569]: DEBUG nova.policy [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c9d77d804154a199681132cb34bf626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8bd8ff748a34e7a83ec0edbfa148aac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 828.612867] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071158} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.615570] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 828.616997] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cdedce-e446-4999-8099-d6095655f03d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.620563] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.634275] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.643512] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 57a63648-83e9-4f23-aebc-050e58149ce2/57a63648-83e9-4f23-aebc-050e58149ce2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.649043] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2026fbc-5e5c-438d-acde-e71e8fd32a2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.663694] env[68569]: DEBUG nova.compute.manager [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 828.663694] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.667442] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b508bf-7a89-4a64-8175-e78c699dbcd0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.672461] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.673802] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-127a89fd-2957-4188-ae51-67d22aa6a278 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.675487] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 828.675487] env[68569]: value = "task-3167038" [ 828.675487] env[68569]: _type = "Task" [ 828.675487] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.682450] env[68569]: DEBUG oslo_vmware.api [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 828.682450] env[68569]: value = "task-3167039" [ 828.682450] env[68569]: _type = "Task" [ 828.682450] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.688758] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167038, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.694921] env[68569]: DEBUG oslo_vmware.api [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167039, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.742886] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "refresh_cache-60aa85f3-edac-40e0-ad31-a8f93219e380" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.743067] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "refresh_cache-60aa85f3-edac-40e0-ad31-a8f93219e380" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.743254] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.046093] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 829.059677] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b49bca-fe1b-43aa-b3c1-72f1e2c6253c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.066191] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ca07a2-8847-4926-a5d3-5f0d1726f75d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.104048] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9312a5-d3dc-4728-817a-25ac92176e07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.114274] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178aa00e-5e91-4353-a5b1-ec6b62bf2a6a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.131570] env[68569]: DEBUG nova.compute.provider_tree [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.198915] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167038, 'name': ReconfigVM_Task, 'duration_secs': 0.480862} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.203036] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 57a63648-83e9-4f23-aebc-050e58149ce2/57a63648-83e9-4f23-aebc-050e58149ce2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.203925] env[68569]: DEBUG oslo_vmware.api [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167039, 'name': PowerOffVM_Task, 'duration_secs': 0.24389} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.204501] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fd5debb-31aa-4538-8cac-16b7d35f7ec7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.206148] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 829.206400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 829.206686] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a362f840-7069-43ae-8144-fac04e613c15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.213299] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 829.213299] env[68569]: value = "task-3167040" [ 829.213299] env[68569]: _type = "Task" [ 829.213299] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.222754] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167040, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.268043] env[68569]: DEBUG nova.network.neutron [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Successfully created port: 66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.278729] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 829.279040] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 829.279373] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Deleting the datastore file [datastore1] 9eafa273-097b-48ac-ae5f-4f7a469ac861 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.279649] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61ba387f-9a35-4341-9619-92773be4705d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.286324] env[68569]: DEBUG oslo_vmware.api [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for the task: (returnval){ [ 829.286324] env[68569]: value = "task-3167042" [ 829.286324] env[68569]: _type = "Task" [ 829.286324] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.287238] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.298365] env[68569]: DEBUG oslo_vmware.api [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167042, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.453589] env[68569]: DEBUG nova.network.neutron [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Updating instance_info_cache with network_info: [{"id": "72163272-5b38-43c3-8978-a346db826a81", "address": "fa:16:3e:43:0e:d0", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72163272-5b", "ovs_interfaceid": "72163272-5b38-43c3-8978-a346db826a81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.635223] env[68569]: DEBUG nova.scheduler.client.report [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.724984] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167040, 'name': Rename_Task, 'duration_secs': 0.172349} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.725282] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.725531] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04c638b4-c83a-49bc-98d5-c3f4bae3cc01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.738669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.738921] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.740507] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 829.740507] env[68569]: value = "task-3167043" [ 829.740507] env[68569]: _type = "Task" [ 829.740507] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.749107] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167043, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.798438] env[68569]: DEBUG oslo_vmware.api [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Task: {'id': task-3167042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166682} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.798821] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.799056] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.799606] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.799606] env[68569]: INFO nova.compute.manager [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Took 1.14 seconds to destroy the instance on the hypervisor. [ 829.799786] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.799862] env[68569]: DEBUG nova.compute.manager [-] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 829.799967] env[68569]: DEBUG nova.network.neutron [-] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.805768] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "cc5139e1-4601-4966-9224-25b8223b8a57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.807590] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "cc5139e1-4601-4966-9224-25b8223b8a57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.807590] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "cc5139e1-4601-4966-9224-25b8223b8a57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.807590] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "cc5139e1-4601-4966-9224-25b8223b8a57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.807590] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "cc5139e1-4601-4966-9224-25b8223b8a57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.809173] env[68569]: INFO nova.compute.manager [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Terminating instance [ 829.956078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "refresh_cache-60aa85f3-edac-40e0-ad31-a8f93219e380" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.956452] 
env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Instance network_info: |[{"id": "72163272-5b38-43c3-8978-a346db826a81", "address": "fa:16:3e:43:0e:d0", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72163272-5b", "ovs_interfaceid": "72163272-5b38-43c3-8978-a346db826a81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.956893] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:0e:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72163272-5b38-43c3-8978-a346db826a81', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.966094] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.966348] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.966589] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f45eb95c-f84d-445a-ab5f-b3784dfbaf00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.987803] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.987803] env[68569]: value = "task-3167044" [ 829.987803] env[68569]: _type = "Task" [ 829.987803] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.997072] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167044, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.043259] env[68569]: DEBUG nova.compute.manager [req-22d8fcc5-5525-486c-9339-a48b72b62ff0 req-886ad105-9e55-4e87-b2f4-2c312670c298 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Received event network-vif-deleted-87002a64-15a4-4a08-879c-483a9bf211c0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 830.043554] env[68569]: INFO nova.compute.manager [req-22d8fcc5-5525-486c-9339-a48b72b62ff0 req-886ad105-9e55-4e87-b2f4-2c312670c298 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Neutron deleted interface 87002a64-15a4-4a08-879c-483a9bf211c0; detaching it from the instance and deleting it from the info cache [ 830.043712] env[68569]: DEBUG nova.network.neutron [req-22d8fcc5-5525-486c-9339-a48b72b62ff0 req-886ad105-9e55-4e87-b2f4-2c312670c298 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.056017] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 830.083481] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.083750] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.083919] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.084115] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.084289] env[68569]: DEBUG nova.virt.hardware [None 
req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.084455] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.084678] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.084837] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.085031] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.085202] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.085375] env[68569]: DEBUG nova.virt.hardware [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.086230] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9371cd82-cc43-4122-9e26-429fb6334091 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.094041] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240045c1-bcb8-4e5b-b566-e70202555baf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.143881] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.146485] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.499s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.146945] env[68569]: DEBUG nova.objects.instance [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lazy-loading 'resources' on Instance uuid 98d5c760-6da3-49e3-af47-20a8054971f3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.150951] env[68569]: DEBUG nova.compute.manager [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Received event network-changed-72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 830.150951] env[68569]: DEBUG nova.compute.manager [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Refreshing instance network info cache due to event network-changed-72163272-5b38-43c3-8978-a346db826a81. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 830.151182] env[68569]: DEBUG oslo_concurrency.lockutils [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] Acquiring lock "refresh_cache-60aa85f3-edac-40e0-ad31-a8f93219e380" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.151359] env[68569]: DEBUG oslo_concurrency.lockutils [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] Acquired lock "refresh_cache-60aa85f3-edac-40e0-ad31-a8f93219e380" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.151506] env[68569]: DEBUG nova.network.neutron [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Refreshing network info cache for port 72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.172464] env[68569]: INFO nova.scheduler.client.report [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleted allocations for instance 64146253-16ab-4d95-83c9-31b74014a040 [ 830.252114] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167043, 'name': PowerOnVM_Task, 'duration_secs': 0.497048} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.252459] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.252687] env[68569]: INFO nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Took 8.61 seconds to spawn the instance on the hypervisor. [ 830.254726] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.254726] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e995e4ea-af57-43ea-8eb3-fb1278f1524c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.313725] env[68569]: DEBUG nova.compute.manager [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 830.313943] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.314862] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c49a9d8-cebb-4862-b76a-e6d7ef19b2cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.322614] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.323008] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a306c4e2-fdd7-4dd4-b766-9bafa1bd84f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.329333] env[68569]: DEBUG oslo_vmware.api [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 830.329333] env[68569]: value = "task-3167045" [ 830.329333] env[68569]: _type = "Task" [ 830.329333] env[68569]: } to complete. 
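The "Waiting for the task: (returnval){ ... } to complete" block and the later "progress is N%" / "completed successfully" entries are oslo.vmware's task polling. A hedged sketch of how a caller typically drives it is below; the host, credentials and vm_ref are placeholders, and only the PowerOffVM_Task method name is taken from the log.

```python
# Hedged sketch of oslo.vmware task polling as seen in the "Waiting for the
# task ... to complete" / "progress is N%" entries. Host, credentials and
# vm_ref are placeholders; only PowerOffVM_Task comes from the log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    "vcenter.example.org", "user", "secret",
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # placeholder: a VirtualMachine managed-object reference

# invoke_api() issues the SOAP call and returns a task moref; wait_for_task()
# polls it (producing the progress/completed log lines) and raises on failure.
task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
session.wait_for_task(task)
```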
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.339173] env[68569]: DEBUG oslo_vmware.api [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.500073] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167044, 'name': CreateVM_Task, 'duration_secs': 0.38235} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.500073] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.500073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.500073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.500073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.500073] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ad5aec5-a087-46ae-b0ee-dcbff4ac30c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.503579] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 830.503579] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52441bfb-a08e-4ce2-98ce-130eecc7a80e" [ 830.503579] env[68569]: _type = "Task" [ 830.503579] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.511547] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52441bfb-a08e-4ce2-98ce-130eecc7a80e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.527016] env[68569]: DEBUG nova.network.neutron [-] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.546540] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-276d131c-6cfb-4d0b-a2e0-14a07fbc71d3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.559255] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088c7ed2-8959-4404-bdae-0c019bdbc261 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.589985] env[68569]: DEBUG nova.compute.manager [req-22d8fcc5-5525-486c-9339-a48b72b62ff0 req-886ad105-9e55-4e87-b2f4-2c312670c298 service nova] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Detach interface failed, port_id=87002a64-15a4-4a08-879c-483a9bf211c0, reason: Instance 9eafa273-097b-48ac-ae5f-4f7a469ac861 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 830.680938] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f8d76e3b-a51e-4f7f-baf3-2a730763e7dd tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "64146253-16ab-4d95-83c9-31b74014a040" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.913s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.776640] env[68569]: INFO nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Took 60.44 seconds to build instance. [ 830.843704] env[68569]: DEBUG oslo_vmware.api [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167045, 'name': PowerOffVM_Task, 'duration_secs': 0.194481} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.848463] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.848633] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.849088] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f02767ae-b1e1-4c11-8bb9-45865cfc95f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.913869] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.913869] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.913869] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Deleting the datastore file [datastore1] cc5139e1-4601-4966-9224-25b8223b8a57 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.913869] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3e6d98a-8025-4980-a475-74a644858f0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.918860] env[68569]: DEBUG oslo_vmware.api [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 830.918860] env[68569]: value = "task-3167047" [ 830.918860] env[68569]: _type = "Task" [ 830.918860] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.925666] env[68569]: DEBUG nova.network.neutron [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Updated VIF entry in instance network info cache for port 72163272-5b38-43c3-8978-a346db826a81. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.926010] env[68569]: DEBUG nova.network.neutron [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Updating instance_info_cache with network_info: [{"id": "72163272-5b38-43c3-8978-a346db826a81", "address": "fa:16:3e:43:0e:d0", "network": {"id": "a4dfe87c-fd90-4643-a4b6-43c5de9bb65b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-789473346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10edb645f90c45edbd3aa43bfa24b86e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72163272-5b", "ovs_interfaceid": "72163272-5b38-43c3-8978-a346db826a81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.933392] env[68569]: DEBUG oslo_vmware.api [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.949865] env[68569]: DEBUG nova.network.neutron [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Successfully updated port: 66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.013674] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52441bfb-a08e-4ce2-98ce-130eecc7a80e, 'name': SearchDatastore_Task, 'duration_secs': 0.010942} completed successfully. 
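The long instance_info_cache entry above (port 72163272-5b38-..., address fa:16:3e:43:0e:d0) is Nova's cached network_info model: a list of VIF dicts. A small illustrative helper for pulling out the fields usually needed when reading these entries by hand; the sample is trimmed to keys actually shown in the log.

```python
# Illustrative only: summarize one cached network_info VIF entry like the one
# logged above. The sample dict is trimmed to fields shown in the log.
vif = {
    "id": "72163272-5b38-43c3-8978-a346db826a81",
    "address": "fa:16:3e:43:0e:d0",
    "devname": "tap72163272-5b",
    "network": {
        "label": "tempest-MultipleCreateTestJSON-789473346-network",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.3"}]}],
    },
}


def summarize_vif(vif):
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return {"port": vif["id"], "mac": vif["address"],
            "dev": vif["devname"], "ips": ips}


print(summarize_vif(vif))
```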
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.016161] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.016270] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.016495] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.016643] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.016819] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.017255] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad7e5809-a12f-4feb-8568-ef1a8d70254a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.025754] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.026120] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.027026] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2604bda8-6ae9-4dae-aa3f-fda749b8ccf9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.031489] env[68569]: INFO nova.compute.manager [-] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Took 1.23 seconds to deallocate network for instance. 
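The sequence above (acquire the per-image lock, "Processing image", MakeDirectory, SearchDatastore_Task) is the datastore image-cache check before the base VMDK is reused or copied. A rough sketch of that pattern, not Nova's code; the in-memory set stands in for what the datastore calls would report, and only the image id and cache path come from the log.

```python
# Rough sketch (not Nova's code) of the image-cache pattern in the entries
# above: serialize on the cached VMDK path, make sure the cache folder exists,
# and fetch the base disk only when it is not already cached. The in-memory
# set stands in for MakeDirectory / SearchDatastore_Task results.
from oslo_concurrency import lockutils

datastore = set()  # stand-in for what SearchDatastore_Task would report


def ensure_cached_image(image_id,
                        cache_dir="[datastore1] devstack-image-cache_base"):
    cached_vmdk = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
    with lockutils.lock(cached_vmdk):
        datastore.add(cache_dir)            # "Created directory ..." (idempotent)
        if cached_vmdk not in datastore:    # cache miss: fetch/copy the base disk
            datastore.add(cached_vmdk)
    return cached_vmdk


print(ensure_cached_image("cfcf6154-fe87-45d3-9aaf-2d3604c95629"))
```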
[ 831.035990] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 831.035990] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0e5f9-d3cf-e4b2-1fd5-9a537c99b422" [ 831.035990] env[68569]: _type = "Task" [ 831.035990] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.048164] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0e5f9-d3cf-e4b2-1fd5-9a537c99b422, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.104577] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb9809b-c854-4a26-b311-169decfb5ad2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.112371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b41654-8e56-4e6d-a5fc-f8c36cd3773d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.142946] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032530d2-9d48-4586-933e-48e1f9d33ecd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.150707] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0090ad19-a6a4-474b-9726-771e088c2234 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.165467] env[68569]: DEBUG nova.compute.provider_tree [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.281295] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "57a63648-83e9-4f23-aebc-050e58149ce2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.080s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.429427] env[68569]: DEBUG oslo_vmware.api [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.457352} completed successfully. 
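The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" entries throughout this stretch come from oslo.concurrency's lockutils. A minimal sketch of the two forms visible here (decorator and context manager); only the lock names are taken from the log, the bodies are placeholders.

```python
# Minimal sketch of the oslo.concurrency named locks behind the
# "Acquiring lock ... / acquired ... waited / released ... held" entries.
# Only the lock names mirror the log; the function bodies are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def update_usage(instance_uuid):
    # Runs with the named lock held, like ResourceTracker.update_usage above.
    print("updating usage for", instance_uuid)


def refresh_network_cache(instance_uuid):
    # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        print("refreshing network info cache for", instance_uuid)


update_usage("98d5c760-6da3-49e3-af47-20a8054971f3")
refresh_network_cache("60aa85f3-edac-40e0-ad31-a8f93219e380")
```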
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.429708] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.429893] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.430085] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.430260] env[68569]: INFO nova.compute.manager [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Took 1.12 seconds to destroy the instance on the hypervisor. [ 831.430500] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
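The "Waiting for function ... _deallocate_network_with_retries to return" entry comes from oslo.service's looping-call machinery, which Nova uses to retry network deallocation. A minimal, generic sketch of that retry pattern; the body and retry budget are made up, only the loop-until-LoopingCallDone idea mirrors the log.

```python
# Generic sketch of the oslo.service looping-call retry pattern behind the
# "Waiting for function ... to return" entry. Body and retry count are
# illustrative; the loop stops when LoopingCallDone is raised.
from oslo_service import loopingcall

attempts = {"left": 3}


def _deallocate_with_retries():
    attempts["left"] -= 1
    if attempts["left"] > 0:
        return  # pretend the call failed; run again after the interval
    raise loopingcall.LoopingCallDone()  # success: stop the loop


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=0.1).wait()
print("network deallocation finished")
```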
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.430922] env[68569]: DEBUG oslo_concurrency.lockutils [req-35cf10d6-1a5c-44b7-ad32-5e2095ef6dfd req-d34689ce-9bd9-4894-832b-87444b659cf7 service nova] Releasing lock "refresh_cache-60aa85f3-edac-40e0-ad31-a8f93219e380" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.431262] env[68569]: DEBUG nova.compute.manager [-] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 831.431377] env[68569]: DEBUG nova.network.neutron [-] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 831.454895] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.455008] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.455170] env[68569]: DEBUG nova.network.neutron [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.546914] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.548027] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0e5f9-d3cf-e4b2-1fd5-9a537c99b422, 'name': SearchDatastore_Task, 'duration_secs': 0.011968} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.548027] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10185558-48fa-4565-9e76-1014e8eff158 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.553188] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 831.553188] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd9d7b-dd48-b4a7-c8a3-c031f5a197ea" [ 831.553188] env[68569]: _type = "Task" [ 831.553188] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.565961] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd9d7b-dd48-b4a7-c8a3-c031f5a197ea, 'name': SearchDatastore_Task, 'duration_secs': 0.009394} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.566410] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.566543] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 60aa85f3-edac-40e0-ad31-a8f93219e380/60aa85f3-edac-40e0-ad31-a8f93219e380.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.567022] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d765f30-86cf-4f8c-a2f2-e856efdd8393 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.573648] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 831.573648] env[68569]: value = "task-3167048" [ 831.573648] env[68569]: _type = "Task" [ 831.573648] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.582658] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167048, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.669216] env[68569]: DEBUG nova.scheduler.client.report [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.783868] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 831.987216] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "c634f7eb-2f71-473d-8f90-71d74edffecb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.987518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.987816] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "c634f7eb-2f71-473d-8f90-71d74edffecb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.988105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.988395] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.990829] env[68569]: INFO nova.compute.manager [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Terminating instance [ 832.014157] env[68569]: DEBUG nova.network.neutron [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.063079] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.063079] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.063079] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.063079] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.063320] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.065807] env[68569]: INFO nova.compute.manager [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Terminating instance [ 832.084917] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 
tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461152} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.087426] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 60aa85f3-edac-40e0-ad31-a8f93219e380/60aa85f3-edac-40e0-ad31-a8f93219e380.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.087645] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.087885] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a37683e-b4f0-488e-b74d-4c8c0bb7a8eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.094294] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 832.094294] env[68569]: value = "task-3167049" [ 832.094294] env[68569]: _type = "Task" [ 832.094294] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.102049] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167049, 'name': ExtendVirtualDisk_Task} progress is 0%. 
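The "Inventory has not changed for provider ... based on inventory data" entry a few records back lists per-resource-class totals, reservations and allocation ratios. Placement's effective capacity per class is, as far as I understand it, (total - reserved) * allocation_ratio; a quick worked check against the logged values (an illustration, not output from this run):

```python
# Worked check of the capacity implied by the logged inventory, assuming
# Placement's usual formula: (total - reserved) * allocation_ratio.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# -> VCPU 192.0 / MEMORY_MB 196078.0 / DISK_GB 400.0
```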
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.163130] env[68569]: DEBUG nova.network.neutron [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Updating instance_info_cache with network_info: [{"id": "66ecd67f-062e-46e1-8ee0-6a4806e9d0e8", "address": "fa:16:3e:8a:16:7a", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ecd67f-06", "ovs_interfaceid": "66ecd67f-062e-46e1-8ee0-6a4806e9d0e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.174062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.177204] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.888s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.177204] env[68569]: DEBUG nova.objects.instance [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lazy-loading 'resources' on Instance uuid 98efdafe-e02b-46ca-a701-b70042513128 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 832.181010] env[68569]: DEBUG nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Received event network-vif-plugged-66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 832.181250] env[68569]: DEBUG oslo_concurrency.lockutils [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] Acquiring lock "b770fbd1-579a-4e3e-a5c9-9f030695f057-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.181394] env[68569]: DEBUG oslo_concurrency.lockutils [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.181549] env[68569]: DEBUG oslo_concurrency.lockutils [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.181758] env[68569]: DEBUG nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] No waiting events found dispatching network-vif-plugged-66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.181858] env[68569]: WARNING nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Received unexpected event network-vif-plugged-66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 for instance with vm_state building and task_state spawning. [ 832.182020] env[68569]: DEBUG nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Received event network-changed-66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 832.182646] env[68569]: DEBUG nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Refreshing instance network info cache due to event network-changed-66ecd67f-062e-46e1-8ee0-6a4806e9d0e8. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 832.182646] env[68569]: DEBUG oslo_concurrency.lockutils [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] Acquiring lock "refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.197667] env[68569]: INFO nova.scheduler.client.report [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Deleted allocations for instance 98d5c760-6da3-49e3-af47-20a8054971f3 [ 832.240439] env[68569]: DEBUG nova.network.neutron [-] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.302251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.497219] env[68569]: DEBUG nova.compute.manager [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 832.497474] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.498406] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a329c1-64fa-4906-9db3-e92a0db9316b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.506216] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.506452] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af840a56-b4ca-4c01-b8cf-3b14e5375f37 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.512717] env[68569]: DEBUG oslo_vmware.api [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 832.512717] env[68569]: value = "task-3167050" [ 832.512717] env[68569]: _type = "Task" [ 832.512717] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.520359] env[68569]: DEBUG oslo_vmware.api [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167050, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.569363] env[68569]: DEBUG nova.compute.manager [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 832.569625] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.570618] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783469a4-1c90-49f1-824e-32ba8536e801 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.579376] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.579735] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d53d351a-50c8-4e7b-be18-b2bef4ea88cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.587116] env[68569]: DEBUG oslo_vmware.api [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 832.587116] env[68569]: value = "task-3167051" [ 832.587116] env[68569]: _type = "Task" [ 832.587116] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.596245] env[68569]: DEBUG oslo_vmware.api [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.603814] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167049, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.051827} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.604086] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.604883] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594e5545-d2af-421c-b30d-5dafaf0ce762 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.627299] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 60aa85f3-edac-40e0-ad31-a8f93219e380/60aa85f3-edac-40e0-ad31-a8f93219e380.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.627615] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70b0568d-8824-4628-ae96-86f7c0674a70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.646992] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 832.646992] env[68569]: value = "task-3167052" [ 832.646992] env[68569]: _type = "Task" [ 832.646992] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.655377] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167052, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.666053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.666495] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Instance network_info: |[{"id": "66ecd67f-062e-46e1-8ee0-6a4806e9d0e8", "address": "fa:16:3e:8a:16:7a", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ecd67f-06", "ovs_interfaceid": "66ecd67f-062e-46e1-8ee0-6a4806e9d0e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 832.666926] env[68569]: DEBUG oslo_concurrency.lockutils [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] Acquired lock "refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.666991] env[68569]: DEBUG nova.network.neutron [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Refreshing network info cache for port 66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.668375] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:16:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66ecd67f-062e-46e1-8ee0-6a4806e9d0e8', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.676578] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 
tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.677708] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.680593] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8509d87b-f25f-49f2-8cc2-01da5a651504 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.705606] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.705606] env[68569]: value = "task-3167053" [ 832.705606] env[68569]: _type = "Task" [ 832.705606] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.706194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4ad1743-7871-41ff-92c6-0e2e2889427d tempest-ServersTestFqdnHostnames-666855204 tempest-ServersTestFqdnHostnames-666855204-project-member] Lock "98d5c760-6da3-49e3-af47-20a8054971f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.920s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.718056] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167053, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.742759] env[68569]: INFO nova.compute.manager [-] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Took 1.31 seconds to deallocate network for instance. [ 833.024720] env[68569]: DEBUG oslo_vmware.api [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167050, 'name': PowerOffVM_Task, 'duration_secs': 0.261618} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.025038] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.025171] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.025490] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-973afc7d-efeb-4895-a5c6-6dd37522e650 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.096408] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.096631] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.096812] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleting the datastore file [datastore1] c634f7eb-2f71-473d-8f90-71d74edffecb {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.097485] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-432b653d-3d33-47c6-83c5-c932e1987c1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.106349] env[68569]: DEBUG oslo_vmware.api [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167051, 'name': PowerOffVM_Task, 'duration_secs': 0.248091} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.107257] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.107513] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.107793] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdb3411f-93f0-4d16-b368-2b09c6c08095 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.111127] env[68569]: DEBUG oslo_vmware.api [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 833.111127] env[68569]: value = "task-3167055" [ 833.111127] env[68569]: _type = "Task" [ 833.111127] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.119955] env[68569]: DEBUG oslo_vmware.api [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167055, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.136590] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed741b74-67f3-4f50-8c37-e83996b75991 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.144344] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806874a4-6a5f-41ed-8e43-54d1d199e87a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.181527] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167052, 'name': ReconfigVM_Task, 'duration_secs': 0.428307} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.185440] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 60aa85f3-edac-40e0-ad31-a8f93219e380/60aa85f3-edac-40e0-ad31-a8f93219e380.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.186584] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c985f0-282e-4d55-9cc2-92bebcf1ebb9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.189111] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.189309] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.189486] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleting the datastore file [datastore1] ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.189709] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43393a2f-6eae-49f8-8079-020b0eea5187 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.191403] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a82f9743-270f-4ed8-8a59-db3a526b5270 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.201049] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b9e4d2-b1af-4238-80a2-b91d2de20568 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.205208] env[68569]: DEBUG oslo_vmware.api [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for the task: (returnval){ [ 833.205208] env[68569]: value = "task-3167058" [ 833.205208] env[68569]: _type = "Task" [ 833.205208] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.206570] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 833.206570] env[68569]: value = "task-3167057" [ 833.206570] env[68569]: _type = "Task" [ 833.206570] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.221129] env[68569]: DEBUG nova.compute.provider_tree [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.233317] env[68569]: DEBUG oslo_vmware.api [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.241083] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167053, 'name': CreateVM_Task, 'duration_secs': 0.404483} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.241083] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167057, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.241083] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.241318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.241374] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.241660] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.242172] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b4aef2-840b-4a45-bb20-c1865972db3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.247292] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 833.247292] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f7afe4-6f0e-edc5-0f3e-402ee869e4af" [ 833.247292] env[68569]: _type = "Task" [ 833.247292] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.256892] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.256892] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f7afe4-6f0e-edc5-0f3e-402ee869e4af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.421862] env[68569]: DEBUG nova.network.neutron [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Updated VIF entry in instance network info cache for port 66ecd67f-062e-46e1-8ee0-6a4806e9d0e8. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.422273] env[68569]: DEBUG nova.network.neutron [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Updating instance_info_cache with network_info: [{"id": "66ecd67f-062e-46e1-8ee0-6a4806e9d0e8", "address": "fa:16:3e:8a:16:7a", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ecd67f-06", "ovs_interfaceid": "66ecd67f-062e-46e1-8ee0-6a4806e9d0e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.621576] env[68569]: DEBUG oslo_vmware.api [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.481052} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.624860] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.624860] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.624860] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.624860] env[68569]: INFO nova.compute.manager [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Took 1.13 seconds to destroy the instance on the hypervisor. 
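The entries above trace the hypervisor-side destroy path for instances c634f7eb-2f71-473d-8f90-71d74edffecb and ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task, each driven through oslo.vmware's session helpers (wait_for_task at api.py:397, the "progress is N%" polling at api.py:434, completion at api.py:444). The sketch below only illustrates that calling pattern; it is not the actual Nova code (which lives in nova.virt.vmwareapi.vm_util and vmops), and the vCenter host, credentials and managed-object references are placeholders.

    from oslo_vmware import api as vmware_api

    # Placeholder session; constructing it logs in to vCenter, which is what the
    # earlier SessionManager.Login entries in this log correspond to.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',        # placeholder host
        'user', 'secret',             # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)

    def destroy_vm(vm_ref, file_manager, datacenter_ref, instance_dir_path):
        # "Invoking VirtualMachine.PowerOffVM_Task" / "Powered off the VM"
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # "Invoking VirtualMachine.UnregisterVM" / "Unregistered the VM";
        # UnregisterVM is a plain method, not a task, so there is nothing to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # "Invoking FileManager.DeleteDatastoreFile_Task" /
        # "Deleting the datastore file [datastore1] ..." / "Deleted the datastore file"
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=instance_dir_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)

wait_for_task blocks on a polling loop that re-reads the task state every task_poll_interval seconds, which is what produces the repeated "progress is N%" lines and the final "completed successfully" entries for tasks such as task-3167055 above.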
[ 833.624860] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.625073] env[68569]: DEBUG nova.compute.manager [-] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 833.625073] env[68569]: DEBUG nova.network.neutron [-] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 833.718770] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167057, 'name': Rename_Task, 'duration_secs': 0.285958} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.721762] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.722129] env[68569]: DEBUG oslo_vmware.api [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Task: {'id': task-3167058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.421397} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.722385] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-920509ef-0f29-4c0d-a009-b92635735d21 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.723885] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.724125] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.724386] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.724551] env[68569]: INFO nova.compute.manager [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Took 1.15 seconds to destroy the instance on the hypervisor. [ 833.724786] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.725154] env[68569]: DEBUG nova.compute.manager [-] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 833.725305] env[68569]: DEBUG nova.network.neutron [-] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 833.729538] env[68569]: DEBUG nova.scheduler.client.report [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.734594] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 833.734594] env[68569]: value = "task-3167059" [ 833.734594] env[68569]: _type = "Task" [ 833.734594] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.742549] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.761852] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f7afe4-6f0e-edc5-0f3e-402ee869e4af, 'name': SearchDatastore_Task, 'duration_secs': 0.029355} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.762202] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.762969] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.763716] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.763716] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.763716] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.763835] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a559789-dea3-4996-a7e3-adcad69f7ede {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.772222] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.772524] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.774360] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-365e9b76-80fd-41f1-a3f4-73bd5d0deda8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.780680] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 833.780680] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a1eee0-3350-7a38-7307-9032b45378b8" [ 833.780680] env[68569]: _type = "Task" [ 833.780680] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.789542] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a1eee0-3350-7a38-7307-9032b45378b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.927241] env[68569]: DEBUG oslo_concurrency.lockutils [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] Releasing lock "refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.927241] env[68569]: DEBUG nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Received event network-vif-deleted-3250ac37-e76e-40a8-a2d7-a0ad04d568f3 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 833.927241] env[68569]: INFO nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Neutron deleted interface 3250ac37-e76e-40a8-a2d7-a0ad04d568f3; detaching it from the instance and deleting it from the info cache [ 833.927241] env[68569]: DEBUG nova.network.neutron [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.107896] env[68569]: DEBUG nova.compute.manager [req-ef772a13-4455-4a10-8d7b-31eec59fbc5f req-1ad18d0f-039c-45c1-9711-bdd0fcf9d188 service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Received event network-vif-deleted-d682f08f-b00c-4ab2-bfd2-ffe374890b52 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 834.108155] env[68569]: INFO nova.compute.manager [req-ef772a13-4455-4a10-8d7b-31eec59fbc5f req-1ad18d0f-039c-45c1-9711-bdd0fcf9d188 service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Neutron deleted interface d682f08f-b00c-4ab2-bfd2-ffe374890b52; detaching it from the instance and deleting it from the info cache [ 834.108286] env[68569]: DEBUG nova.network.neutron [req-ef772a13-4455-4a10-8d7b-31eec59fbc5f req-1ad18d0f-039c-45c1-9711-bdd0fcf9d188 service nova] [instance: 
ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.211663] env[68569]: DEBUG nova.compute.manager [req-e13b0e57-2add-4b07-9409-65cc86ce1cc1 req-40d6e4ce-abae-4491-bb32-6bf73a309239 service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Received event network-vif-deleted-cdf95c3f-e426-4563-995a-6fcfe2ffa912 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 834.211949] env[68569]: INFO nova.compute.manager [req-e13b0e57-2add-4b07-9409-65cc86ce1cc1 req-40d6e4ce-abae-4491-bb32-6bf73a309239 service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Neutron deleted interface cdf95c3f-e426-4563-995a-6fcfe2ffa912; detaching it from the instance and deleting it from the info cache [ 834.212056] env[68569]: DEBUG nova.network.neutron [req-e13b0e57-2add-4b07-9409-65cc86ce1cc1 req-40d6e4ce-abae-4491-bb32-6bf73a309239 service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.235757] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.059s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.238876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.223s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.240564] env[68569]: INFO nova.compute.claims [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.252106] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167059, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.291687] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a1eee0-3350-7a38-7307-9032b45378b8, 'name': SearchDatastore_Task, 'duration_secs': 0.026576} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.293233] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1036bba3-c914-4636-a0e6-ce49ba699e81 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.299534] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 834.299534] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e6033b-ae00-17e7-803b-ac2175d0a72b" [ 834.299534] env[68569]: _type = "Task" [ 834.299534] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.308063] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e6033b-ae00-17e7-803b-ac2175d0a72b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.341424] env[68569]: DEBUG nova.network.neutron [-] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.353482] env[68569]: INFO nova.scheduler.client.report [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Deleted allocations for instance 98efdafe-e02b-46ca-a701-b70042513128 [ 834.431420] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-318ccb5f-f8f8-470e-b61d-3287ddfab19b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.441861] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5551cd9f-b56c-42cd-97ad-d3bab306db75 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.476372] env[68569]: DEBUG nova.compute.manager [req-6027fc4b-f409-4849-b924-db019c055995 req-aac0d9de-a1db-4afc-a4f0-eb6572ead853 service nova] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Detach interface failed, port_id=3250ac37-e76e-40a8-a2d7-a0ad04d568f3, reason: Instance cc5139e1-4601-4966-9224-25b8223b8a57 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 834.587917] env[68569]: DEBUG nova.network.neutron [-] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.610587] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58ad301d-ed20-4e50-9200-69c91ac03d77 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.622371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03aa6b1-a088-4604-975c-fab7ca6e9f38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.660568] env[68569]: DEBUG nova.compute.manager [req-ef772a13-4455-4a10-8d7b-31eec59fbc5f req-1ad18d0f-039c-45c1-9711-bdd0fcf9d188 service nova] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Detach interface failed, port_id=d682f08f-b00c-4ab2-bfd2-ffe374890b52, reason: Instance ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 834.715353] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a48c5ce2-ebd8-4da6-ae07-1755b2dee071 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.726777] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae02aaea-9adf-432c-a637-ec71ed58dd43 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.748940] env[68569]: DEBUG oslo_vmware.api [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167059, 'name': PowerOnVM_Task, 'duration_secs': 0.982703} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.761600] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.762237] env[68569]: INFO nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Took 7.35 seconds to spawn the instance on the hypervisor. 
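Interleaved with the VMware calls, the oslo_concurrency.lockutils entries record lock traffic in two forms: the "Acquiring lock ... by ..." / "acquired ... :: waited Ns" / '"released" ... :: held Ns' triples come from the @lockutils.synchronized decorator (the inner wrapper at lockutils.py:405/410/424), while the plainer "Acquiring lock" / "Releasing lock" pairs around the refresh_cache- and devstack-image-cache_base names come from the lockutils.lock context manager. A minimal sketch of both forms, assuming illustrative function names rather than anything taken from Nova itself:

    from oslo_concurrency import lockutils

    # Decorator form: while update_usage() runs, any other caller synchronized on
    # 'compute_resources' blocks, and lockutils logs how long it waited and how
    # long the lock was held (e.g. the "waited 49.223s" / "held 2.059s" entries).
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource-tracker style work happens while the lock is held

    # Context-manager form: used for per-instance cache locks such as
    # refresh_cache-b770fbd1-579a-4e3e-a5c9-9f030695f057 in this log.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache under the lock

These non-external locks serialize callers within one service process only; the long waits recorded above (e.g. 49.223s on "compute_resources") simply mean another request held the same named lock for that long.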
[ 834.762237] env[68569]: DEBUG nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.762402] env[68569]: DEBUG nova.compute.manager [req-e13b0e57-2add-4b07-9409-65cc86ce1cc1 req-40d6e4ce-abae-4491-bb32-6bf73a309239 service nova] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Detach interface failed, port_id=cdf95c3f-e426-4563-995a-6fcfe2ffa912, reason: Instance c634f7eb-2f71-473d-8f90-71d74edffecb could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 834.764249] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335aa3ca-c59e-4a03-a3e2-92aa72c097f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.810589] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e6033b-ae00-17e7-803b-ac2175d0a72b, 'name': SearchDatastore_Task, 'duration_secs': 0.023178} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.810589] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.810589] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] b770fbd1-579a-4e3e-a5c9-9f030695f057/b770fbd1-579a-4e3e-a5c9-9f030695f057.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.810589] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03c244d5-14bd-4504-ae4f-89a33734e9ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.817864] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 834.817864] env[68569]: value = "task-3167060" [ 834.817864] env[68569]: _type = "Task" [ 834.817864] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.825808] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167060, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.843985] env[68569]: INFO nova.compute.manager [-] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Took 1.22 seconds to deallocate network for instance. [ 834.862132] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8ed01a3c-3b14-4875-b4cb-a324b4bf764e tempest-AttachInterfacesV270Test-555980489 tempest-AttachInterfacesV270Test-555980489-project-member] Lock "98efdafe-e02b-46ca-a701-b70042513128" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.077s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.097311] env[68569]: INFO nova.compute.manager [-] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Took 1.37 seconds to deallocate network for instance. [ 835.287863] env[68569]: INFO nova.compute.manager [None req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Took 60.22 seconds to build instance. [ 835.331986] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167060, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.352434] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.604085] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.738934] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb9e8ff-e3bb-475f-853d-748af6e28802 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.748107] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1302d720-f012-44d9-a7f6-360562bf21c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.780843] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57654fed-c1ab-4140-9853-1ab987917e34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.788521] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dacb064-98d9-49fa-aaa5-dbe1200448ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.793945] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-fa1af9e8-87cd-40db-b9ba-a8227bfa6077 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.567s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.806503] env[68569]: DEBUG nova.compute.provider_tree [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.827829] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541382} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.828110] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] b770fbd1-579a-4e3e-a5c9-9f030695f057/b770fbd1-579a-4e3e-a5c9-9f030695f057.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.828320] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.828557] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51bd86cd-f402-41aa-be2d-dbbc5f19e1ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.835010] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 835.835010] env[68569]: value = "task-3167061" [ 835.835010] env[68569]: _type = "Task" [ 835.835010] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.843890] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167061, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.993842] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "57a63648-83e9-4f23-aebc-050e58149ce2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.994132] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "57a63648-83e9-4f23-aebc-050e58149ce2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.994344] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "57a63648-83e9-4f23-aebc-050e58149ce2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.994552] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "57a63648-83e9-4f23-aebc-050e58149ce2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.994786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "57a63648-83e9-4f23-aebc-050e58149ce2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.997139] env[68569]: INFO nova.compute.manager [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Terminating instance [ 836.119845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "60aa85f3-edac-40e0-ad31-a8f93219e380" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.122460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.122460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "60aa85f3-edac-40e0-ad31-a8f93219e380-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.122460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.122460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.123511] env[68569]: INFO nova.compute.manager [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Terminating instance [ 836.296996] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 836.312666] env[68569]: DEBUG nova.scheduler.client.report [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.351287] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088725} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.351512] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.352690] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7af2f8-50a4-44eb-a6e1-e4b002f2e2aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.380128] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] b770fbd1-579a-4e3e-a5c9-9f030695f057/b770fbd1-579a-4e3e-a5c9-9f030695f057.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.380727] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-683e3c08-168e-47cd-8a37-8576d7c94ba8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.403394] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 836.403394] env[68569]: value = "task-3167062" [ 836.403394] env[68569]: _type = "Task" [ 836.403394] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.412945] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167062, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.501079] env[68569]: DEBUG nova.compute.manager [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 836.501447] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.502234] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d4c700-1acb-4b3c-94e7-322171f59002 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.510292] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.510437] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b532a821-3284-4c3c-a31f-82edee9bac17 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.517233] env[68569]: DEBUG oslo_vmware.api [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 836.517233] env[68569]: value = "task-3167063" [ 836.517233] env[68569]: _type = "Task" [ 836.517233] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.525141] env[68569]: DEBUG oslo_vmware.api [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167063, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.632772] env[68569]: DEBUG nova.compute.manager [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 836.632772] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.633464] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0c81ad-9b13-46f7-aaaa-db3103b4b171 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.643444] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.643697] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cdfe5af-2dbd-4589-96db-f26988a214bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.650517] env[68569]: DEBUG oslo_vmware.api [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 836.650517] env[68569]: value = "task-3167064" [ 836.650517] env[68569]: _type = "Task" [ 836.650517] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.660554] env[68569]: DEBUG oslo_vmware.api [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.821089] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.821892] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.826420] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.928s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.827863] env[68569]: INFO nova.compute.claims [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.833569] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.915027] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167062, 'name': ReconfigVM_Task, 'duration_secs': 0.39262} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.915027] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Reconfigured VM instance instance-00000039 to attach disk [datastore1] b770fbd1-579a-4e3e-a5c9-9f030695f057/b770fbd1-579a-4e3e-a5c9-9f030695f057.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.915027] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4355afc5-5d6b-45f6-98bd-0ca9eb91c7c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.919804] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 836.919804] env[68569]: value = "task-3167065" [ 836.919804] env[68569]: _type = "Task" [ 836.919804] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.928122] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167065, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.026907] env[68569]: DEBUG oslo_vmware.api [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167063, 'name': PowerOffVM_Task, 'duration_secs': 0.413966} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.027231] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.027400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.027695] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b451b16d-b4c3-428b-93b7-f11ae5f2d0f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.090015] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.090791] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.091190] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleting the datastore file [datastore1] 57a63648-83e9-4f23-aebc-050e58149ce2 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.091902] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6faeca07-5762-4511-8a9b-d040aaa7c5dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.100703] env[68569]: DEBUG oslo_vmware.api [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 837.100703] env[68569]: value = "task-3167067" [ 837.100703] env[68569]: _type = "Task" [ 837.100703] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.109663] env[68569]: DEBUG oslo_vmware.api [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.166434] env[68569]: DEBUG oslo_vmware.api [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167064, 'name': PowerOffVM_Task, 'duration_secs': 0.284512} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.166771] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.166949] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.167235] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fc76166-b3ae-47ee-9f4c-db4d98b79c5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.231757] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.232066] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.232182] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleting the datastore file [datastore1] 60aa85f3-edac-40e0-ad31-a8f93219e380 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.232529] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccb99841-f799-40dd-aaa7-66f9a713759d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.250186] env[68569]: DEBUG oslo_vmware.api [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for the task: (returnval){ [ 837.250186] env[68569]: value = "task-3167069" [ 837.250186] env[68569]: _type = "Task" [ 837.250186] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.263371] env[68569]: DEBUG oslo_vmware.api [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.338409] env[68569]: DEBUG nova.compute.utils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 837.339052] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.339201] env[68569]: DEBUG nova.network.neutron [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.429895] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167065, 'name': Rename_Task, 'duration_secs': 0.135916} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.430320] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.430587] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f18ae6b-01da-496f-9e9b-9f3a592ca105 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.433351] env[68569]: DEBUG nova.policy [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afebab35cda9438781e2b466ce586405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.440043] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 837.440043] env[68569]: value = "task-3167070" [ 837.440043] env[68569]: _type = "Task" [ 837.440043] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.448490] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.612073] env[68569]: DEBUG oslo_vmware.api [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14781} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.612345] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.612631] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.612817] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.612987] env[68569]: INFO nova.compute.manager [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 837.613257] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.613456] env[68569]: DEBUG nova.compute.manager [-] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 837.613552] env[68569]: DEBUG nova.network.neutron [-] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.762102] env[68569]: DEBUG oslo_vmware.api [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Task: {'id': task-3167069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14626} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.762444] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.762772] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.762900] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.763329] env[68569]: INFO nova.compute.manager [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Took 1.13 seconds to destroy the instance on the hypervisor. [ 837.763443] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.763670] env[68569]: DEBUG nova.compute.manager [-] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 837.763775] env[68569]: DEBUG nova.network.neutron [-] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.846187] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.849122] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "559a1eff-8892-4dda-a540-4a053ae0ef2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.849393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "559a1eff-8892-4dda-a540-4a053ae0ef2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.954192] env[68569]: DEBUG oslo_vmware.api [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167070, 'name': PowerOnVM_Task, 'duration_secs': 0.45044} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.956920] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.957172] env[68569]: INFO nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Took 7.90 seconds to spawn the instance on the hypervisor. 
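The spawn and destroy sequences above all follow the same task-polling pattern: a vCenter task is created (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task), wait_for_task blocks on it, _poll_task periodically logs "progress is N%", and the finished task is logged as completed successfully with its duration_secs. The sketch below is a minimal illustration of that loop only, not the oslo.vmware implementation; poll_fn, the keys of the dict it is assumed to return, and the 0.5-second interval are assumptions made for the example.

import time

def wait_for_task(poll_fn, interval=0.5):
    # Illustrative polling loop; NOT the oslo.vmware implementation.
    # poll_fn() is assumed to return a dict like
    # {'state': 'running'|'success'|'error', 'progress': int, 'result': ...}.
    start = time.monotonic()
    while True:
        info = poll_fn()
        if info['state'] == 'success':
            # Mirrors the "completed successfully" records, which carry duration_secs.
            info['duration_secs'] = time.monotonic() - start
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info.get('result'))
        # Mirrors the periodic "progress is N%" DEBUG records.
        print('progress is %d%%' % info.get('progress', 0))
        time.sleep(interval)

A call would look like wait_for_task(lambda: poll_vcenter_task(task_ref)), where poll_vcenter_task is a hypothetical helper that reads the task's current state; in the log the equivalent polling happens inside oslo.vmware's api module (wait_for_task / _poll_task).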
[ 837.957352] env[68569]: DEBUG nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.959259] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d20e79-2121-49a8-96a1-c5a4189ef639 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.001224] env[68569]: DEBUG nova.network.neutron [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Successfully created port: 3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.203093] env[68569]: DEBUG nova.compute.manager [req-83fdc558-33d1-4936-8ddd-d0c41b3c730d req-72eea2a1-cae3-43e5-97fd-5b2ab695694d service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Received event network-vif-deleted-0769c926-1f70-44d4-8559-8b1f4e5aa14a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 838.203373] env[68569]: INFO nova.compute.manager [req-83fdc558-33d1-4936-8ddd-d0c41b3c730d req-72eea2a1-cae3-43e5-97fd-5b2ab695694d service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Neutron deleted interface 0769c926-1f70-44d4-8559-8b1f4e5aa14a; detaching it from the instance and deleting it from the info cache [ 838.203644] env[68569]: DEBUG nova.network.neutron [req-83fdc558-33d1-4936-8ddd-d0c41b3c730d req-72eea2a1-cae3-43e5-97fd-5b2ab695694d service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.324293] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4317d572-61e6-41b9-9aa3-88a513ce96ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.334615] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0b0179-630e-40ba-963c-0105364ec0e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.379061] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e446af-c777-4138-964a-70ba87493d4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.393361] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0055fca7-913c-455e-92da-a29debf7bebd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.411229] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 838.484761] env[68569]: INFO nova.compute.manager [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Took 60.88 seconds to build instance. [ 838.572781] env[68569]: DEBUG nova.network.neutron [-] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.642149] env[68569]: DEBUG nova.network.neutron [-] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.707062] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e5c68b3-4e16-45d6-b1d2-744d3d06f00b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.716742] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b74d5d0-5bd8-4282-b62f-e7561668cfe4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.745832] env[68569]: DEBUG nova.compute.manager [req-83fdc558-33d1-4936-8ddd-d0c41b3c730d req-72eea2a1-cae3-43e5-97fd-5b2ab695694d service nova] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Detach interface failed, port_id=0769c926-1f70-44d4-8559-8b1f4e5aa14a, reason: Instance 57a63648-83e9-4f23-aebc-050e58149ce2 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 838.884680] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.910060] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.910060] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.910252] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.910252] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.910514] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.910663] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.911016] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.911016] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.911187] 
env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.912308] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.912308] env[68569]: DEBUG nova.virt.hardware [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.912634] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f107552a-cc75-4e70-9421-eb371433ed11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.924653] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acabaaa9-b164-4f02-822a-7f401becb102 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.941801] env[68569]: ERROR nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [req-c1cbaf39-db6b-4b0a-9249-992ce42736bb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c1cbaf39-db6b-4b0a-9249-992ce42736bb"}]} [ 838.958985] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 838.973208] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 838.973540] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 838.986835] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0868097e-d700-4b39-9bef-c227fa3e7800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.470s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.992033] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 838.992033] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 91 to 92 during operation: update_aggregates {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 839.010059] env[68569]: DEBUG nova.scheduler.client.report [None 
req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 839.075526] env[68569]: INFO nova.compute.manager [-] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Took 1.46 seconds to deallocate network for instance. [ 839.144689] env[68569]: INFO nova.compute.manager [-] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Took 1.38 seconds to deallocate network for instance. [ 839.422281] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17621135-ae4e-4b36-8bb9-36d85b8f10f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.431263] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e491cb6-9406-4baa-846a-1a227344e20e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.463816] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debb3111-b6f9-401f-9323-b6ebbcb88ee8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.472166] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1019310-95b5-4889-ad91-3da1d5eda044 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.480484] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.480767] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.489702] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.492497] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.584771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.655853] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.734887] env[68569]: DEBUG nova.compute.manager [req-0f211e50-4187-4298-ae13-f2a1f5bb2d33 req-94d308b0-329f-4fe2-90ae-6d736507287c service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Received event network-vif-plugged-3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 839.735129] env[68569]: DEBUG oslo_concurrency.lockutils [req-0f211e50-4187-4298-ae13-f2a1f5bb2d33 req-94d308b0-329f-4fe2-90ae-6d736507287c service nova] Acquiring lock "ab021831-2cc3-4457-aa55-b55036c2a423-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.735339] env[68569]: DEBUG oslo_concurrency.lockutils [req-0f211e50-4187-4298-ae13-f2a1f5bb2d33 req-94d308b0-329f-4fe2-90ae-6d736507287c service nova] Lock "ab021831-2cc3-4457-aa55-b55036c2a423-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.735504] env[68569]: DEBUG oslo_concurrency.lockutils [req-0f211e50-4187-4298-ae13-f2a1f5bb2d33 req-94d308b0-329f-4fe2-90ae-6d736507287c service nova] Lock "ab021831-2cc3-4457-aa55-b55036c2a423-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.735670] env[68569]: DEBUG nova.compute.manager [req-0f211e50-4187-4298-ae13-f2a1f5bb2d33 req-94d308b0-329f-4fe2-90ae-6d736507287c service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] No waiting events found dispatching network-vif-plugged-3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 839.735833] env[68569]: WARNING nova.compute.manager [req-0f211e50-4187-4298-ae13-f2a1f5bb2d33 req-94d308b0-329f-4fe2-90ae-6d736507287c service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Received unexpected event network-vif-plugged-3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe for instance with vm_state building and task_state spawning. [ 839.834665] env[68569]: DEBUG nova.network.neutron [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Successfully updated port: 3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.874625] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.874865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.016923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.026932] env[68569]: ERROR nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [req-a9164a55-3a15-4b29-8235-c6a70e6cd51e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a9164a55-3a15-4b29-8235-c6a70e6cd51e"}]} [ 840.050658] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 840.074190] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 840.074512] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 840.093207] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 840.118712] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 840.226446] env[68569]: DEBUG nova.compute.manager [req-2e8711df-c547-49de-a5c9-503487dc5261 req-78baa0fb-e152-4c21-97f6-d98eae52515e service nova] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Received event network-vif-deleted-72163272-5b38-43c3-8978-a346db826a81 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 840.338402] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 
tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-ab021831-2cc3-4457-aa55-b55036c2a423" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.338572] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-ab021831-2cc3-4457-aa55-b55036c2a423" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.338787] env[68569]: DEBUG nova.network.neutron [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.511381] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ccae6c-8170-4d08-8318-89b597885c18 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.521738] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c236f64b-b04e-4f54-93e4-8723b3575b6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.552251] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be9133a-76d0-4823-b0e4-e394341f538c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.561040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f089db9f-086c-423a-8d88-7ce360cd4e36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.573912] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 840.876755] env[68569]: DEBUG nova.network.neutron [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.027461] env[68569]: DEBUG nova.network.neutron [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Updating instance_info_cache with network_info: [{"id": "3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe", "address": "fa:16:3e:14:98:10", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aef6e13-a8", "ovs_interfaceid": "3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.109368] env[68569]: DEBUG nova.scheduler.client.report [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 841.109648] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 93 to 94 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 841.109827] env[68569]: DEBUG nova.compute.provider_tree [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.530288] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-ab021831-2cc3-4457-aa55-b55036c2a423" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.530660] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance network_info: |[{"id": "3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe", "address": "fa:16:3e:14:98:10", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aef6e13-a8", "ovs_interfaceid": "3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 841.531119] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:98:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.538442] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating folder: Project (9cc82d358e214a959ae6b34c33344b86). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 841.538727] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac6bb2c0-88d7-436b-9c69-f79126187400 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.549918] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created folder: Project (9cc82d358e214a959ae6b34c33344b86) in parent group-v633430. [ 841.550121] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating folder: Instances. Parent ref: group-v633596. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 841.550350] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85ad68dc-2930-42b9-80bc-bb5c9b2bd982 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.560072] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created folder: Instances in parent group-v633596. [ 841.560299] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 841.560508] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.560725] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3e4261b-dc4a-4864-96cb-d0fb9f2b4713 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.579855] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.579855] env[68569]: value = "task-3167073" [ 841.579855] env[68569]: _type = "Task" [ 841.579855] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.586989] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167073, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.616069] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.790s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.616622] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 841.619201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.455s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.623136] env[68569]: INFO nova.compute.claims [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.762371] env[68569]: DEBUG nova.compute.manager [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Received event network-changed-3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 841.762524] env[68569]: DEBUG nova.compute.manager [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Refreshing instance network info cache due to event network-changed-3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 841.762772] env[68569]: DEBUG oslo_concurrency.lockutils [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] Acquiring lock "refresh_cache-ab021831-2cc3-4457-aa55-b55036c2a423" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.762919] env[68569]: DEBUG oslo_concurrency.lockutils [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] Acquired lock "refresh_cache-ab021831-2cc3-4457-aa55-b55036c2a423" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.763240] env[68569]: DEBUG nova.network.neutron [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Refreshing network info cache for port 3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.090070] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167073, 'name': CreateVM_Task, 'duration_secs': 0.302335} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.090246] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.091053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.091243] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.091559] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.091824] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5b4f359-a8b6-4560-a5cd-f2ca9aa78991 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.096250] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 842.096250] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521f7751-22d6-43fc-eb7a-8323e8fa65a6" [ 842.096250] env[68569]: _type = "Task" [ 842.096250] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.103708] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521f7751-22d6-43fc-eb7a-8323e8fa65a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.127630] env[68569]: DEBUG nova.compute.utils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 842.128632] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 842.128771] env[68569]: DEBUG nova.network.neutron [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 842.174945] env[68569]: DEBUG nova.policy [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a84e1c69c5454ffcab15cea91a4c1dfb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a77ea99dbb445396a7ac5888135321', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 842.457078] env[68569]: DEBUG nova.network.neutron [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Successfully created port: 4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.480146] env[68569]: DEBUG nova.network.neutron [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Updated VIF entry in instance network info cache for port 3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.480500] env[68569]: DEBUG nova.network.neutron [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Updating instance_info_cache with network_info: [{"id": "3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe", "address": "fa:16:3e:14:98:10", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aef6e13-a8", "ovs_interfaceid": "3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.607195] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521f7751-22d6-43fc-eb7a-8323e8fa65a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009158} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.607569] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.607810] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.608313] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.608469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.608656] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.608920] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5500c668-af97-42ad-b39c-985f48d1006b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.617182] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.617373] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.618064] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-334c6df5-b457-4230-8689-ca2935b0d5b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.623087] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 842.623087] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5289d7c4-0810-0bd6-17aa-52e4a0c6a33d" [ 842.623087] env[68569]: _type = "Task" [ 842.623087] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.634113] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 842.636611] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5289d7c4-0810-0bd6-17aa-52e4a0c6a33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.983352] env[68569]: DEBUG oslo_concurrency.lockutils [req-adbd5f82-635a-48a2-8859-7cfdd2a684a8 req-520943d6-2543-45cd-8b60-ec14a678685f service nova] Releasing lock "refresh_cache-ab021831-2cc3-4457-aa55-b55036c2a423" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.001917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44154162-c957-4827-a92c-23a0d1ecd78e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.009145] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c3ca6e-e967-40c2-96fa-8a208185af7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.039343] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d0eeb0-2afa-4132-95cd-13e4e3f5b9d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.046563] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c34b27e-e9d9-4426-b7f8-fcf7e0364723 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.059191] env[68569]: DEBUG nova.compute.provider_tree [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.133349] env[68569]: DEBUG oslo_vmware.api 
[None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5289d7c4-0810-0bd6-17aa-52e4a0c6a33d, 'name': SearchDatastore_Task, 'duration_secs': 0.007495} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.134344] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11951bef-48e7-4cf3-b6f0-dbfe326a2d72 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.142040] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 843.142040] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529f6bb7-3f3e-dd6c-2300-d6916fe7c543" [ 843.142040] env[68569]: _type = "Task" [ 843.142040] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.149270] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529f6bb7-3f3e-dd6c-2300-d6916fe7c543, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.562537] env[68569]: DEBUG nova.scheduler.client.report [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.643904] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 843.655368] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529f6bb7-3f3e-dd6c-2300-d6916fe7c543, 'name': SearchDatastore_Task, 'duration_secs': 0.009443} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.655629] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.655885] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.656154] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebdfaef6-5e23-4df3-ab79-23021f79bbbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.663380] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 843.663380] env[68569]: value = "task-3167074" [ 843.663380] env[68569]: _type = "Task" [ 843.663380] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.670358] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 843.670552] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.670739] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 843.670952] env[68569]: DEBUG nova.virt.hardware 
[None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.671113] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 843.671277] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 843.671477] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 843.671629] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 843.671797] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 843.672210] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 843.672210] env[68569]: DEBUG nova.virt.hardware [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 843.672910] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881dca20-a6c2-4dd5-8b33-39a55ee1a5e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.679748] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167074, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.682847] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a974f26-e6f1-4ba4-a4e2-96a2b557ad0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.939176] env[68569]: DEBUG nova.compute.manager [req-ef77208d-9393-4936-98b2-45e0b0d26d10 req-78ff8ffb-167d-403d-be4f-403deffdf7c4 service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Received event network-vif-plugged-4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 843.939306] env[68569]: DEBUG oslo_concurrency.lockutils [req-ef77208d-9393-4936-98b2-45e0b0d26d10 req-78ff8ffb-167d-403d-be4f-403deffdf7c4 service nova] Acquiring lock "123a6895-af16-493a-afce-7ae6c2137422-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.939504] env[68569]: DEBUG oslo_concurrency.lockutils [req-ef77208d-9393-4936-98b2-45e0b0d26d10 req-78ff8ffb-167d-403d-be4f-403deffdf7c4 service nova] Lock "123a6895-af16-493a-afce-7ae6c2137422-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.939673] env[68569]: DEBUG oslo_concurrency.lockutils [req-ef77208d-9393-4936-98b2-45e0b0d26d10 req-78ff8ffb-167d-403d-be4f-403deffdf7c4 service nova] Lock "123a6895-af16-493a-afce-7ae6c2137422-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.940020] env[68569]: DEBUG nova.compute.manager [req-ef77208d-9393-4936-98b2-45e0b0d26d10 req-78ff8ffb-167d-403d-be4f-403deffdf7c4 service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] No waiting events found dispatching network-vif-plugged-4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 843.940122] env[68569]: WARNING nova.compute.manager [req-ef77208d-9393-4936-98b2-45e0b0d26d10 req-78ff8ffb-167d-403d-be4f-403deffdf7c4 service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Received unexpected event network-vif-plugged-4febb0a1-9ff1-4755-b668-4e2723df003b for instance with vm_state building and task_state spawning. 
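The nova.virt.hardware records above trace CPU topology selection for the 1-vCPU m1.nano flavor: with no limits set on the flavor or image, the per-dimension maxima default to 65536 sockets/cores/threads, and the only factorization of a single vCPU is sockets=1, cores=1, threads=1, hence "Got 1 possible topologies". A minimal sketch of that enumeration step (an illustration inferred from these log lines, not the actual code in nova/virt/hardware.py):

    # Sketch only: enumerate candidate (sockets, cores, threads) topologies whose
    # product equals the flavor's vCPU count, capped by the per-dimension maxima.
    # Nova's real _get_possible_cpu_topologies applies more rules than this.
    import itertools

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_cpu_topologies(1))  # [(1, 1, 1)] -- one topology, as logged for m1.nano

For the one-vCPU case this reproduces the single VirtCPUTopology(cores=1,sockets=1,threads=1) reported above.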
[ 844.069347] env[68569]: DEBUG nova.network.neutron [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Successfully updated port: 4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.071580] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.072091] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 844.075520] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.376s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.075739] env[68569]: DEBUG nova.objects.instance [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lazy-loading 'resources' on Instance uuid eec09a1c-e8b2-4b6a-9545-e190e1f965d1 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.173379] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167074, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.576152] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.576323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.576479] env[68569]: DEBUG nova.network.neutron [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.580034] env[68569]: DEBUG nova.compute.utils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.583211] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.583385] env[68569]: DEBUG nova.network.neutron [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.636783] env[68569]: DEBUG nova.policy [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3dd70bdeb58448d89783e139e1565ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c76d37ddf9e04b56af59b059c0646318', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.678266] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167074, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.997217] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8d9b91-31df-4917-b6fc-27844cdcf326 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.004782] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd005e1-ac42-415e-b133-04b68034aa0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.010809] env[68569]: DEBUG nova.network.neutron [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Successfully created port: 7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.038469] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6b7920-eb15-4336-930a-63cc2fd84841 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.045629] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc4b58f-6ba6-4213-9985-3acd5eb8cf5b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.058894] env[68569]: DEBUG nova.compute.provider_tree [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.083666] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 845.127813] env[68569]: DEBUG nova.network.neutron [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.175085] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167074, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.448234} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.175369] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.175591] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.175855] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ed3fa13-05a8-46f8-b746-c7dbc3f7250d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.181662] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 845.181662] env[68569]: value = "task-3167075" [ 845.181662] env[68569]: _type = "Task" [ 845.181662] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.192210] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167075, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.277203] env[68569]: DEBUG nova.network.neutron [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Updating instance_info_cache with network_info: [{"id": "4febb0a1-9ff1-4755-b668-4e2723df003b", "address": "fa:16:3e:52:96:c0", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4febb0a1-9f", "ovs_interfaceid": "4febb0a1-9ff1-4755-b668-4e2723df003b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.562199] env[68569]: DEBUG nova.scheduler.client.report [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.692249] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167075, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.194595} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.693152] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.693782] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbf3b0e-b8cc-4267-b851-a0be4f0282c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.716612] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.716898] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7454203d-3122-4600-aae6-64acd879b403 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.736252] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 845.736252] env[68569]: value = "task-3167076" [ 845.736252] env[68569]: _type = "Task" [ 845.736252] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.746346] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167076, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.779710] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.780178] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Instance network_info: |[{"id": "4febb0a1-9ff1-4755-b668-4e2723df003b", "address": "fa:16:3e:52:96:c0", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4febb0a1-9f", "ovs_interfaceid": "4febb0a1-9ff1-4755-b668-4e2723df003b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 845.780798] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:96:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4febb0a1-9ff1-4755-b668-4e2723df003b', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.792637] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Creating folder: Project (03a77ea99dbb445396a7ac5888135321). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.793043] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2e5b843-ca5f-4c10-a376-f1fbd9b7ceee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.805176] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Created folder: Project (03a77ea99dbb445396a7ac5888135321) in parent group-v633430. 
[ 845.805355] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Creating folder: Instances. Parent ref: group-v633599. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.805651] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-739e7c0a-255d-46df-9fd2-8fb3797e6f6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.816242] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Created folder: Instances in parent group-v633599. [ 845.816344] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 845.816499] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.816731] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e8a3d3f-7a2b-4f2f-aab2-f6824e1e1cba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.838861] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.838861] env[68569]: value = "task-3167079" [ 845.838861] env[68569]: _type = "Task" [ 845.838861] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.850129] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167079, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.024228] env[68569]: DEBUG nova.compute.manager [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Received event network-changed-4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 846.024427] env[68569]: DEBUG nova.compute.manager [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Refreshing instance network info cache due to event network-changed-4febb0a1-9ff1-4755-b668-4e2723df003b. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 846.024718] env[68569]: DEBUG oslo_concurrency.lockutils [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] Acquiring lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.024894] env[68569]: DEBUG oslo_concurrency.lockutils [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] Acquired lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.025108] env[68569]: DEBUG nova.network.neutron [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Refreshing network info cache for port 4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.067860] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.071014] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 45.483s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.071376] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.071376] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 846.071643] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.745s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.072135] env[68569]: DEBUG nova.objects.instance [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'resources' on Instance uuid 492c0fa1-f821-496a-86c2-f7686479a733 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.073772] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476da999-d085-401f-bfb1-7cc84a94e6af {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.083401] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec96a1e-096b-4d42-b912-6415c36c877b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.090749] env[68569]: INFO nova.scheduler.client.report [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleted allocations for instance eec09a1c-e8b2-4b6a-9545-e190e1f965d1 [ 846.103124] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 846.108422] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4cc464-bdaf-4e77-8f95-c8f1f120234a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.117829] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7561ea30-9b84-4796-aa05-4677437ef1f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.150131] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179448MB free_disk=128GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 846.150429] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.155780] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 846.156045] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Flavor limits 0:0:0 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.156216] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 846.156409] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.156563] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 846.156731] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 846.156956] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 846.157131] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 846.157310] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 846.157481] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 846.157651] env[68569]: DEBUG nova.virt.hardware [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 846.158831] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a52006b-29bd-49e9-b1b0-0c5877d57dcb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.169637] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c210ce9c-e915-49ae-9e98-a2e7d6a87740 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.246061] env[68569]: DEBUG 
oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167076, 'name': ReconfigVM_Task, 'duration_secs': 0.286239} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.246346] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Reconfigured VM instance instance-0000003a to attach disk [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.246949] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d1ee062-9c91-4a17-b093-f4c585f3cb4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.253484] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 846.253484] env[68569]: value = "task-3167080" [ 846.253484] env[68569]: _type = "Task" [ 846.253484] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.260952] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167080, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.351376] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167079, 'name': CreateVM_Task, 'duration_secs': 0.340914} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.351678] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 846.352629] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.353080] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.353430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 846.353764] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9b35dc7-7a16-4c20-bcb1-a75eb46ae0ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.359321] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 846.359321] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d91f9e-53fc-1abf-eac6-0e952d4e8edd" [ 846.359321] env[68569]: _type = "Task" [ 846.359321] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.367864] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d91f9e-53fc-1abf-eac6-0e952d4e8edd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.569239] env[68569]: DEBUG nova.network.neutron [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Successfully updated port: 7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.613860] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3e934a02-fcb4-42ae-a923-df94621043f0 tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "eec09a1c-e8b2-4b6a-9545-e190e1f965d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.540s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.712502] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "de2b0206-0c73-4275-89ff-37199520dd71" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.712825] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.713041] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "de2b0206-0c73-4275-89ff-37199520dd71-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.713188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.713353] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.715870] env[68569]: INFO nova.compute.manager [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Terminating instance [ 
846.767534] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167080, 'name': Rename_Task, 'duration_secs': 0.167071} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.767822] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.769134] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d819f19-fd39-41c6-aceb-1045c15317fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.773061] env[68569]: DEBUG nova.network.neutron [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Updated VIF entry in instance network info cache for port 4febb0a1-9ff1-4755-b668-4e2723df003b. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.773376] env[68569]: DEBUG nova.network.neutron [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Updating instance_info_cache with network_info: [{"id": "4febb0a1-9ff1-4755-b668-4e2723df003b", "address": "fa:16:3e:52:96:c0", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4febb0a1-9f", "ovs_interfaceid": "4febb0a1-9ff1-4755-b668-4e2723df003b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.779115] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 846.779115] env[68569]: value = "task-3167081" [ 846.779115] env[68569]: _type = "Task" [ 846.779115] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.789883] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.873337] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d91f9e-53fc-1abf-eac6-0e952d4e8edd, 'name': SearchDatastore_Task, 'duration_secs': 0.009739} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.873626] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.873862] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.874104] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.874252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.874433] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.874693] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73b9124c-f9ec-45ef-8ae7-30736e704140 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.885024] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.885128] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.885858] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7783f1d2-d1d3-4da3-9c98-4042c55ec136 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.893360] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 846.893360] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c94488-f07a-b67d-8d4e-a15919c0dbd4" [ 846.893360] env[68569]: _type = "Task" [ 846.893360] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.901885] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c94488-f07a-b67d-8d4e-a15919c0dbd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.954028] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049f1691-cee1-46c0-94ae-007516670885 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.961772] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a173a9-357c-4bc2-b2eb-c82d8450057f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.007434] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baf8016-4a80-40cf-b966-8f36539aed9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.022868] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a57c47b-2520-49a9-823a-7a46c59103a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.037445] env[68569]: DEBUG nova.compute.provider_tree [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.072395] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.072547] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquired lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.072580] env[68569]: DEBUG nova.network.neutron [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.220887] env[68569]: DEBUG nova.compute.manager [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 847.221131] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.222129] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a208d8-8eda-4673-8cff-1741936217b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.229645] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.229875] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-972bc370-1552-45bb-9957-fd4f99efd3de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.236116] env[68569]: DEBUG oslo_vmware.api [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 847.236116] env[68569]: value = "task-3167082" [ 847.236116] env[68569]: _type = "Task" [ 847.236116] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.245847] env[68569]: DEBUG oslo_vmware.api [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3167082, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.277646] env[68569]: DEBUG oslo_concurrency.lockutils [req-e40504a4-ae92-4a36-b367-26974b02e302 req-b56f0643-df25-42f2-9a4a-cf00742a641c service nova] Releasing lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.289136] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167081, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.406543] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c94488-f07a-b67d-8d4e-a15919c0dbd4, 'name': SearchDatastore_Task, 'duration_secs': 0.01548} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.407364] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f633ae56-93bb-4aa1-a2b1-d9222f24a4e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.412625] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 847.412625] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b812c9-509e-cc63-d8e6-e8b440ade8d3" [ 847.412625] env[68569]: _type = "Task" [ 847.412625] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.421041] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b812c9-509e-cc63-d8e6-e8b440ade8d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.569804] env[68569]: DEBUG nova.scheduler.client.report [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 847.570106] env[68569]: DEBUG nova.compute.provider_tree [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 94 to 95 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 847.570288] env[68569]: DEBUG nova.compute.provider_tree [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.604052] env[68569]: DEBUG nova.network.neutron [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.745927] env[68569]: DEBUG oslo_vmware.api [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3167082, 'name': PowerOffVM_Task, 'duration_secs': 0.194572} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.746223] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.746273] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.746507] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7ac13f5-f3e7-4041-bb15-636fcc5987f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.753831] env[68569]: DEBUG nova.network.neutron [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Updating instance_info_cache with network_info: [{"id": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "address": "fa:16:3e:65:92:91", "network": {"id": "f8fbe9cc-a280-4b7d-929f-5595f6ac8084", "bridge": "br-int", "label": "tempest-ServersTestJSON-71300592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76d37ddf9e04b56af59b059c0646318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb93f86-e3", "ovs_interfaceid": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.791867] env[68569]: DEBUG oslo_vmware.api [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167081, 'name': PowerOnVM_Task, 'duration_secs': 0.771082} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.792127] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.793025] env[68569]: INFO nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Took 8.91 seconds to spawn the instance on the hypervisor. [ 847.793025] env[68569]: DEBUG nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.793322] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4256cd-79c8-46d0-b8dd-ef7c118d5ff0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.811773] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.811997] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.813039] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleting the datastore file [datastore1] de2b0206-0c73-4275-89ff-37199520dd71 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.813334] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bcc1b09-0369-421a-9815-a2815a080005 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.821061] env[68569]: DEBUG oslo_vmware.api [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for the task: (returnval){ [ 847.821061] env[68569]: value = "task-3167084" [ 847.821061] env[68569]: _type = "Task" [ 847.821061] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.830052] env[68569]: DEBUG oslo_vmware.api [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3167084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.923722] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b812c9-509e-cc63-d8e6-e8b440ade8d3, 'name': SearchDatastore_Task, 'duration_secs': 0.015242} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.923970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.924273] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/123a6895-af16-493a-afce-7ae6c2137422.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.924630] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adb2e5d4-f4dc-49eb-b1d2-681dff6e5995 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.932030] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 847.932030] env[68569]: value = "task-3167085" [ 847.932030] env[68569]: _type = "Task" [ 847.932030] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.939707] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167085, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.049342] env[68569]: DEBUG nova.compute.manager [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Received event network-vif-plugged-7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 848.049577] env[68569]: DEBUG oslo_concurrency.lockutils [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] Acquiring lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.049798] env[68569]: DEBUG oslo_concurrency.lockutils [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.049977] env[68569]: DEBUG oslo_concurrency.lockutils [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.050201] env[68569]: DEBUG nova.compute.manager [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] No waiting events found dispatching network-vif-plugged-7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 848.050394] env[68569]: WARNING nova.compute.manager [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Received unexpected event network-vif-plugged-7eb93f86-e36c-4fa0-838c-ff5929a60333 for instance with vm_state building and task_state spawning. [ 848.050564] env[68569]: DEBUG nova.compute.manager [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Received event network-changed-7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 848.050729] env[68569]: DEBUG nova.compute.manager [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Refreshing instance network info cache due to event network-changed-7eb93f86-e36c-4fa0-838c-ff5929a60333. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 848.050943] env[68569]: DEBUG oslo_concurrency.lockutils [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] Acquiring lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.075213] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.077913] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.312s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.078175] env[68569]: DEBUG nova.objects.instance [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lazy-loading 'resources' on Instance uuid 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.096885] env[68569]: INFO nova.scheduler.client.report [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted allocations for instance 492c0fa1-f821-496a-86c2-f7686479a733 [ 848.258974] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Releasing lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.263022] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Instance network_info: |[{"id": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "address": "fa:16:3e:65:92:91", "network": {"id": "f8fbe9cc-a280-4b7d-929f-5595f6ac8084", "bridge": "br-int", "label": "tempest-ServersTestJSON-71300592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76d37ddf9e04b56af59b059c0646318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb93f86-e3", "ovs_interfaceid": 
"7eb93f86-e36c-4fa0-838c-ff5929a60333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 848.263022] env[68569]: DEBUG oslo_concurrency.lockutils [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] Acquired lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.263361] env[68569]: DEBUG nova.network.neutron [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Refreshing network info cache for port 7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.263361] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:92:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7eb93f86-e36c-4fa0-838c-ff5929a60333', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.270575] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Creating folder: Project (c76d37ddf9e04b56af59b059c0646318). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.271305] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4328c2b4-ff13-436e-a8c2-40df9fd478d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.285874] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Created folder: Project (c76d37ddf9e04b56af59b059c0646318) in parent group-v633430. [ 848.286572] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Creating folder: Instances. Parent ref: group-v633602. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.286572] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-deccd221-308a-423b-a690-ea4326b6d8a5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.297040] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Created folder: Instances in parent group-v633602. 
[ 848.297335] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.297547] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.297758] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e95f4cf-7f92-4e80-90c9-83d469188e23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.322051] env[68569]: INFO nova.compute.manager [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Took 63.32 seconds to build instance. [ 848.331464] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.331464] env[68569]: value = "task-3167088" [ 848.331464] env[68569]: _type = "Task" [ 848.331464] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.338172] env[68569]: DEBUG oslo_vmware.api [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Task: {'id': task-3167084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275917} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.338852] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.339070] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.339264] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.339439] env[68569]: INFO nova.compute.manager [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Took 1.12 seconds to destroy the instance on the hypervisor. 
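The 'Waiting for the task: (returnval){ value = "task-3167088" ... }' entry above, together with the recurring "_poll_task ... progress is N%." lines throughout this trace, reflects oslo.vmware's wait_for_task loop: vCenter returns a task handle immediately and the driver polls it until it reports success or error. A simplified, self-contained stand-in for that polling loop; fetch_task_info and its return shape are assumptions for illustration, since the real driver goes through the VMwareAPISession and oslo.service looping calls:

    import time

    def wait_for_vcenter_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        # fetch_task_info is assumed to return a dict like
        # {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            # Corresponds to the "... Task} progress is N%." debug lines above.
            print('progress is %d%%' % info.get('progress', 0))
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

The completed-task lines above ("completed successfully. ... 'duration_secs': ...") are what this loop reports once the state flips to success.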
[ 848.339679] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.339879] env[68569]: DEBUG nova.compute.manager [-] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.339977] env[68569]: DEBUG nova.network.neutron [-] [instance: de2b0206-0c73-4275-89ff-37199520dd71] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.345494] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167088, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.444544] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167085, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.607727] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9abf307a-759f-481a-8131-49a74d3bbbb1 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "492c0fa1-f821-496a-86c2-f7686479a733" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.592s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.832038] env[68569]: DEBUG oslo_concurrency.lockutils [None req-85b79284-ff56-4b0d-8118-f314603c0e40 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "ab021831-2cc3-4457-aa55-b55036c2a423" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.051s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.845856] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167088, 'name': CreateVM_Task, 'duration_secs': 0.375377} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.850752] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.851918] env[68569]: DEBUG nova.compute.manager [req-4a436d32-efd2-432c-b28f-c743067e7af2 req-0bb02f65-f1d4-4e7f-a0ee-b66f0b76abbb service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Received event network-vif-deleted-502ae245-07dd-43f0-a1dc-c733e5f5cd3a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 848.852117] env[68569]: INFO nova.compute.manager [req-4a436d32-efd2-432c-b28f-c743067e7af2 req-0bb02f65-f1d4-4e7f-a0ee-b66f0b76abbb service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Neutron deleted interface 502ae245-07dd-43f0-a1dc-c733e5f5cd3a; detaching it from the instance and deleting it from the info cache [ 848.852290] env[68569]: DEBUG nova.network.neutron [req-4a436d32-efd2-432c-b28f-c743067e7af2 req-0bb02f65-f1d4-4e7f-a0ee-b66f0b76abbb service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.854394] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.854514] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.854825] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 848.855083] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8168eaa-021a-474c-ba9f-5899019dae02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.861866] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 848.861866] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524dc284-f739-b5ac-c5eb-435dd3b3a603" [ 848.861866] env[68569]: _type = "Task" [ 848.861866] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.875286] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524dc284-f739-b5ac-c5eb-435dd3b3a603, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.944979] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167085, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52456} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.945246] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/123a6895-af16-493a-afce-7ae6c2137422.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.945450] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.945695] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46812c45-45c7-44f3-a5f1-b22fbef2e168 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.951832] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 848.951832] env[68569]: value = "task-3167089" [ 848.951832] env[68569]: _type = "Task" [ 848.951832] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.961872] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167089, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.997450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473c5486-d0c9-4755-9d67-0cb33984b4c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.004984] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933aceff-57e9-4f1c-a6f0-e9efa6fba6cc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.041859] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cab0f00-6363-414b-b578-982c2ca97392 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.051031] env[68569]: INFO nova.compute.manager [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Rebuilding instance [ 849.053193] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f9d284-1dfd-4e38-b438-63f2526e09ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.070146] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.076633] env[68569]: DEBUG nova.network.neutron [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Updated VIF entry in instance network info cache for port 7eb93f86-e36c-4fa0-838c-ff5929a60333. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.076963] env[68569]: DEBUG nova.network.neutron [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Updating instance_info_cache with network_info: [{"id": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "address": "fa:16:3e:65:92:91", "network": {"id": "f8fbe9cc-a280-4b7d-929f-5595f6ac8084", "bridge": "br-int", "label": "tempest-ServersTestJSON-71300592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76d37ddf9e04b56af59b059c0646318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb93f86-e3", "ovs_interfaceid": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.106214] env[68569]: DEBUG nova.compute.manager [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.107066] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8e46da-6d87-48da-a4b2-ccf93ec10cc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.209520] env[68569]: DEBUG nova.network.neutron [-] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.340608] env[68569]: DEBUG nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 849.354773] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-645f2dcf-034c-443c-a3d0-d8ae3189e410 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.365159] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ba08d4-6600-4302-823b-4e22aba1c1ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.385614] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524dc284-f739-b5ac-c5eb-435dd3b3a603, 'name': SearchDatastore_Task, 'duration_secs': 0.028197} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.386762] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.387175] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.387704] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.387794] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.387981] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.401023] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68f90633-e87b-42e0-a9f0-6f70508e407d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.403123] env[68569]: DEBUG nova.compute.manager [req-4a436d32-efd2-432c-b28f-c743067e7af2 req-0bb02f65-f1d4-4e7f-a0ee-b66f0b76abbb service nova] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Detach interface failed, 
port_id=502ae245-07dd-43f0-a1dc-c733e5f5cd3a, reason: Instance de2b0206-0c73-4275-89ff-37199520dd71 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 849.418628] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.418885] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.419705] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ccb3182-7efc-4e31-9834-8087e13051be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.425183] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 849.425183] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5241ee77-ca2b-9521-e991-4557dd71b15f" [ 849.425183] env[68569]: _type = "Task" [ 849.425183] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.433188] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5241ee77-ca2b-9521-e991-4557dd71b15f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.460986] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069623} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.461271] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.462054] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8f171c-a934-4056-85d8-a9c0e101fb25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.483777] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/123a6895-af16-493a-afce-7ae6c2137422.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.484093] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1ba0587-bab8-4e43-a20f-81258701fd8f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.503932] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 849.503932] env[68569]: value = "task-3167090" [ 849.503932] env[68569]: _type = "Task" [ 849.503932] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.512276] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167090, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.581461] env[68569]: DEBUG oslo_concurrency.lockutils [req-45b99963-ccae-4179-8e04-5fa2030d7cb7 req-f1d401bb-a325-49ab-9560-d139b9255e21 service nova] Releasing lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.592200] env[68569]: ERROR nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [req-58b8564c-5b65-4eda-b32e-a26380a6ef2e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-58b8564c-5b65-4eda-b32e-a26380a6ef2e"}]} [ 849.608053] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 849.622912] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 849.623178] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.635208] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 849.654047] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 849.714013] env[68569]: INFO nova.compute.manager [-] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Took 1.37 seconds to deallocate network for instance. 
[ 849.860780] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.937274] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5241ee77-ca2b-9521-e991-4557dd71b15f, 'name': SearchDatastore_Task, 'duration_secs': 0.037251} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.938068] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f766df01-2fbe-40cd-acb6-f317b2350b1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.945162] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 849.945162] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c1cf2-6d24-ef06-00ce-c50d7cd160a8" [ 849.945162] env[68569]: _type = "Task" [ 849.945162] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.952370] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c1cf2-6d24-ef06-00ce-c50d7cd160a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.013182] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167090, 'name': ReconfigVM_Task, 'duration_secs': 0.329025} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.013460] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/123a6895-af16-493a-afce-7ae6c2137422.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.014128] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03e4ba2f-179c-452f-aaa7-34ab3bbd544f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.017211] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e6bd33-a4bc-4c63-a813-7fbd03c1db09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.024811] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7005b757-3ad5-499c-aeb3-e224ac7b339b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.028034] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 850.028034] env[68569]: value = "task-3167091" [ 850.028034] env[68569]: _type = "Task" [ 850.028034] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.058767] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f09c06-320f-419a-8f6c-067cbec6fb1a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.064782] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167091, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.069700] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40a747d-c26a-43ef-a96f-cab05850f4fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.084294] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.119596] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.119917] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e653dae8-518a-40b9-9bf6-4fc3c6926086 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.126764] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 850.126764] env[68569]: value = "task-3167092" [ 850.126764] env[68569]: _type = "Task" [ 850.126764] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.136209] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167092, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.220978] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.297115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "b91304c7-e74d-402b-865b-150e0057c895" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.297363] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.455324] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c1cf2-6d24-ef06-00ce-c50d7cd160a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009415} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.455587] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.455842] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 060fc4c8-b173-4fc4-8232-e13e3eac9dc3/060fc4c8-b173-4fc4-8232-e13e3eac9dc3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.456114] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36fe4d13-485f-4c34-bef9-a050f0ee7faa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.463049] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 850.463049] env[68569]: value = "task-3167093" [ 850.463049] env[68569]: _type = "Task" [ 850.463049] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.470717] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167093, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.537575] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167091, 'name': Rename_Task, 'duration_secs': 0.153128} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.538192] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.538278] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09c0b8c6-6b07-4666-beae-fb361f66c203 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.544092] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 850.544092] env[68569]: value = "task-3167094" [ 850.544092] env[68569]: _type = "Task" [ 850.544092] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.551101] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.605913] env[68569]: ERROR nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [req-5f225886-b51d-4bcc-bb62-f4139a0eca69] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5f225886-b51d-4bcc-bb62-f4139a0eca69"}]} [ 850.624868] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 850.637434] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167092, 'name': PowerOffVM_Task, 'duration_secs': 0.210223} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.637659] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.637882] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.638859] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5208ca26-0bf0-42e6-bec8-67d5b26051a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.642203] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 850.642409] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.648709] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.648965] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89eb5a01-8cc4-4642-aba3-ba0a517bcccc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.653785] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 850.674787] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 850.714665] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.714891] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.715087] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.715385] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f11e82c-c4de-41f7-8fd7-d71993e8538f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.722967] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 850.722967] env[68569]: value = "task-3167096" [ 850.722967] env[68569]: _type = "Task" [ 850.722967] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.733965] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.975200] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167093, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.057821] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167094, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.096501] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b126ce32-8cbd-4b9d-bba4-3f518e77c098 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.103393] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b15ff75-8bcd-4b31-8ddb-86c9ef596258 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.132776] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3249305b-84a5-4196-935e-b7fd0bb43624 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.139691] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c89d568-d2bd-4492-892e-5c755da51e20 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.152680] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 851.232668] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.473034] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167093, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617794} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.473307] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 060fc4c8-b173-4fc4-8232-e13e3eac9dc3/060fc4c8-b173-4fc4-8232-e13e3eac9dc3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 851.473522] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 851.473773] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca88aa7b-2e82-453f-9e62-6c1c2497a186 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.479868] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 851.479868] env[68569]: value = "task-3167097" [ 851.479868] env[68569]: _type = "Task" [ 851.479868] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.490527] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.554042] env[68569]: DEBUG oslo_vmware.api [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167094, 'name': PowerOnVM_Task, 'duration_secs': 0.538345} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.554299] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.554501] env[68569]: INFO nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Took 7.91 seconds to spawn the instance on the hypervisor. 
[ 851.554677] env[68569]: DEBUG nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.555435] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d062d7d-712b-49b6-8da8-d21257955ed9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.686309] env[68569]: DEBUG nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 851.686661] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 97 to 98 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 851.686885] env[68569]: DEBUG nova.compute.provider_tree [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 851.732709] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.602622} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.733023] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.733211] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.733385] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.989529] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062696} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.989804] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.990576] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d34d1cc-6451-4994-be6f-14b65cdd5de1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.012057] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 060fc4c8-b173-4fc4-8232-e13e3eac9dc3/060fc4c8-b173-4fc4-8232-e13e3eac9dc3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.012057] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48956721-d448-4c45-a248-14090e98c870 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.031346] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 852.031346] env[68569]: value = "task-3167098" [ 852.031346] env[68569]: _type = "Task" [ 852.031346] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.038899] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167098, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.070520] env[68569]: INFO nova.compute.manager [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Took 64.19 seconds to build instance. [ 852.195486] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.117s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.198145] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.635s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.200445] env[68569]: INFO nova.compute.claims [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.214323] env[68569]: INFO nova.scheduler.client.report [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleted allocations for instance 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6 [ 852.542274] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167098, 'name': ReconfigVM_Task, 'duration_secs': 0.285218} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.542648] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 060fc4c8-b173-4fc4-8232-e13e3eac9dc3/060fc4c8-b173-4fc4-8232-e13e3eac9dc3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.543451] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28b80996-574b-44fe-9b84-521b1dc5ada4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.549791] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 852.549791] env[68569]: value = "task-3167099" [ 852.549791] env[68569]: _type = "Task" [ 852.549791] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.557835] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167099, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.573838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6c0e9ee2-bd65-45bf-8f01-a4ba41d19aea tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "123a6895-af16-493a-afce-7ae6c2137422" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.518s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.722537] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e7e1bb61-5587-46f1-b549-03e1823e97ba tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.435s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.763816] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 852.764368] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.764655] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 852.764930] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.765738] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 852.765738] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 852.765738] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 852.765738] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 852.765917] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 852.766014] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 852.766220] env[68569]: DEBUG nova.virt.hardware [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 852.767267] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67150118-d1d3-43f3-ad11-e771b75a7335 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.775535] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d7791a-6190-4c8a-95c6-9aec7d82538c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.790024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:98:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.796587] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.796767] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.797052] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-300cf50e-e009-43f2-97de-44bbcb07bab3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.817067] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.817067] env[68569]: value = "task-3167100" [ 852.817067] env[68569]: _type = "Task" [ 852.817067] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.824372] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167100, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.907347] env[68569]: INFO nova.compute.manager [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Rescuing [ 852.907631] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.907863] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.908070] env[68569]: DEBUG nova.network.neutron [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.068139] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167099, 'name': Rename_Task, 'duration_secs': 0.151529} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.068505] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.068682] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fa27f82-cc80-458b-a2b1-9542af88333c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.077640] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 853.081056] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 853.081056] env[68569]: value = "task-3167101" [ 853.081056] env[68569]: _type = "Task" [ 853.081056] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.092554] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167101, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.329020] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167100, 'name': CreateVM_Task, 'duration_secs': 0.294919} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.329209] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.329971] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.330221] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.330562] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 853.330834] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0455a9fd-a9b6-4641-869e-655d75981144 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.337737] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 853.337737] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52db2a57-d5ba-97eb-3374-1774afd9d591" [ 853.337737] env[68569]: _type = "Task" [ 853.337737] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.347179] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52db2a57-d5ba-97eb-3374-1774afd9d591, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.579865] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3c8a9c-1aa4-4174-953a-6e08559790c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.595024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16aa7208-bfba-4e8b-a0d4-2ac480efa819 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.599994] env[68569]: DEBUG oslo_vmware.api [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167101, 'name': PowerOnVM_Task, 'duration_secs': 0.441802} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.600923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.601566] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.601774] env[68569]: INFO nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Took 7.50 seconds to spawn the instance on the hypervisor. 
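Editor's note: the inventory dicts logged for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 all share one shape (per resource class: total, reserved, min_unit, max_unit, step_size, allocation_ratio), and the surrounding lines alternate between "Inventory has not changed" and a generation bump. The sketch below is a simplified illustration of how a cached provider record might make that decision; `CachedProvider` and its behavior are assumptions for illustration, not Nova's ProviderTree code.

```python
# Inventory keyed by resource class, in the same shape the log shows.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129,
                'step_size': 1, 'allocation_ratio': 1.0},
}


class CachedProvider:
    """Toy stand-in for one entry of a compute node's provider cache."""

    def __init__(self, uuid, generation, inventory):
        self.uuid = uuid
        self.generation = generation
        self.inventory = inventory

    def update_inventory(self, new_inventory, new_generation):
        """Return True and bump the cached generation if anything changed,
        otherwise report that the inventory is unchanged."""
        if self.inventory == new_inventory and self.generation == new_generation:
            print(f"Inventory has not changed for provider {self.uuid}")
            return False
        print(f"Updating provider {self.uuid} generation from "
              f"{self.generation} to {new_generation}")
        self.generation = new_generation
        self.inventory = new_inventory
        return True


provider = CachedProvider('a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6', 97, inventory)
provider.update_inventory(dict(inventory), 98)   # generation bump, as in the log
provider.update_inventory(dict(inventory), 98)   # subsequent no-op check
```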
[ 853.601948] env[68569]: DEBUG nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.602696] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7112807-1e55-42de-a7c2-491d2c526e01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.629717] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1634f73-22eb-40b4-925b-242b365d013b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.642731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "3551627b-9c90-43ea-bae7-d186eaa53c6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.642731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.646134] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead6bf84-dc12-48d7-83c3-6de190e9f57b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.660946] env[68569]: DEBUG nova.compute.provider_tree [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.697128] env[68569]: DEBUG nova.network.neutron [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Updating instance_info_cache with network_info: [{"id": "4febb0a1-9ff1-4755-b668-4e2723df003b", "address": "fa:16:3e:52:96:c0", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4febb0a1-9f", "ovs_interfaceid": "4febb0a1-9ff1-4755-b668-4e2723df003b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.850828] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52db2a57-d5ba-97eb-3374-1774afd9d591, 'name': SearchDatastore_Task, 'duration_secs': 0.010449} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.851248] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.851478] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.851711] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.851856] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.852063] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.852305] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e42b4495-c6a2-4a3d-9e79-16e0486b1461 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.860770] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.860970] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.861703] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8777071d-41c2-42d7-ac24-9158fb7a4296 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.866964] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 853.866964] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f552d5-a053-e0a5-6eda-f48bfb30778c" [ 853.866964] env[68569]: _type = "Task" [ 853.866964] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.874448] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f552d5-a053-e0a5-6eda-f48bfb30778c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.154187] env[68569]: INFO nova.compute.manager [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Took 61.02 seconds to build instance. [ 854.166363] env[68569]: DEBUG nova.scheduler.client.report [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.200539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "refresh_cache-123a6895-af16-493a-afce-7ae6c2137422" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.377862] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f552d5-a053-e0a5-6eda-f48bfb30778c, 'name': SearchDatastore_Task, 'duration_secs': 0.008583} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.378647] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ee5d16-6feb-470b-8039-ed9fdcfa0fb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.383641] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 854.383641] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520d530e-60e4-c7b5-a7c6-91227e278819" [ 854.383641] env[68569]: _type = "Task" [ 854.383641] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.391107] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520d530e-60e4-c7b5-a7c6-91227e278819, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.656703] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0a317d4-3aba-49d9-8b14-798bb576c922 tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.960s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.671424] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.671902] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 854.674449] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.514s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.674634] env[68569]: DEBUG nova.objects.instance [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] [instance: 50abc994-682a-40d6-ae77-601839b98793] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 854.893966] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520d530e-60e4-c7b5-a7c6-91227e278819, 'name': SearchDatastore_Task, 'duration_secs': 0.009402} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.894273] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.894537] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.894790] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2b19def-78ca-4450-accf-67bc50b175c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.901777] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 854.901777] env[68569]: value = "task-3167102" [ 854.901777] env[68569]: _type = "Task" [ 854.901777] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.910377] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.162183] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 855.179263] env[68569]: DEBUG nova.compute.utils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 855.184395] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 855.186891] env[68569]: DEBUG nova.network.neutron [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.255158] env[68569]: DEBUG nova.policy [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7734f1c6556e44cd8088438e474da6e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '625a1b6fb894446f82d06e9abdea28ea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 855.418378] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48227} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.418827] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.420023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.420023] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23d5658c-e88f-4384-aded-64d156317345 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.426040] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 855.426040] env[68569]: value = "task-3167103" [ 855.426040] env[68569]: _type = "Task" [ 855.426040] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.435153] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167103, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.608546] env[68569]: DEBUG nova.compute.manager [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Received event network-changed-7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 855.608743] env[68569]: DEBUG nova.compute.manager [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Refreshing instance network info cache due to event network-changed-7eb93f86-e36c-4fa0-838c-ff5929a60333. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 855.608953] env[68569]: DEBUG oslo_concurrency.lockutils [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] Acquiring lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.609108] env[68569]: DEBUG oslo_concurrency.lockutils [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] Acquired lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.609265] env[68569]: DEBUG nova.network.neutron [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Refreshing network info cache for port 7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 855.684891] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 855.693468] env[68569]: DEBUG oslo_concurrency.lockutils [None req-64a9b13b-8f62-41c2-bd08-6af3403b725b tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.695751] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.488s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.700433] env[68569]: INFO nova.compute.claims [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.706158] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.739434] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.739434] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-ed886111-553a-4c42-b712-9b0ea91d4e29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.746020] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 855.746020] env[68569]: value = "task-3167104" [ 855.746020] env[68569]: _type = "Task" [ 855.746020] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.749808] env[68569]: DEBUG nova.network.neutron [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Successfully created port: eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.757381] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.937171] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.309835} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.937171] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.938582] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b1015c-2386-4a46-8e04-223f34e45b87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.962083] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.962556] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e14ae4ac-ead0-42de-982f-01056ac06de0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.983215] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 855.983215] env[68569]: value = "task-3167105" [ 855.983215] env[68569]: _type = "Task" [ 
855.983215] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.993636] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167105, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.255148] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167104, 'name': PowerOffVM_Task, 'duration_secs': 0.317629} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.257599] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.258622] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9e5ebe-a817-4a3a-8e5c-c8bcde5b622f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.279875] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a851d60-35cf-4679-b22b-cdd43cb9717c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.314617] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 856.315255] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb73eb33-b823-4292-b328-3ee98a8984eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.321523] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 856.321523] env[68569]: value = "task-3167106" [ 856.321523] env[68569]: _type = "Task" [ 856.321523] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.332200] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 856.332400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.332644] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.332787] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.332963] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.333290] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91ab64f1-8716-4d49-a9c9-401c59b60246 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.341349] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.341525] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.342244] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d09ea23-45b4-462d-b3ac-eed9fa4d9fdf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.347645] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 856.347645] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a4f75e-5ac8-f71c-9259-92e50a7018e7" [ 856.347645] env[68569]: _type = "Task" [ 856.347645] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.355951] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a4f75e-5ac8-f71c-9259-92e50a7018e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.393175] env[68569]: DEBUG nova.network.neutron [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Updated VIF entry in instance network info cache for port 7eb93f86-e36c-4fa0-838c-ff5929a60333. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.393694] env[68569]: DEBUG nova.network.neutron [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Updating instance_info_cache with network_info: [{"id": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "address": "fa:16:3e:65:92:91", "network": {"id": "f8fbe9cc-a280-4b7d-929f-5595f6ac8084", "bridge": "br-int", "label": "tempest-ServersTestJSON-71300592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76d37ddf9e04b56af59b059c0646318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb93f86-e3", "ovs_interfaceid": "7eb93f86-e36c-4fa0-838c-ff5929a60333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.493873] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167105, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.710023] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 856.729979] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 856.730250] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.730408] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 856.730585] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.730727] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 856.730870] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 856.731114] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 856.731296] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 856.731463] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 856.731662] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 856.731838] env[68569]: DEBUG nova.virt.hardware [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 856.732780] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75a9b79-3e53-40dc-8b0d-4592d9bbaa91 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.743456] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8530320f-81d0-463d-b177-b32523cd4ad7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.859070] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a4f75e-5ac8-f71c-9259-92e50a7018e7, 'name': SearchDatastore_Task, 'duration_secs': 0.010159} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.859848] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6599dfe2-70fe-4da8-bec0-8f9a45a11b5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.866378] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 856.866378] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c2d8d-0e09-cda3-8afd-f5f10cb19b98" [ 856.866378] env[68569]: _type = "Task" [ 856.866378] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.873539] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c2d8d-0e09-cda3-8afd-f5f10cb19b98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.896328] env[68569]: DEBUG oslo_concurrency.lockutils [req-599954ae-d651-4490-9c12-5d1b901daace req-33ef26cb-d463-4da1-99a2-842a03c2290a service nova] Releasing lock "refresh_cache-060fc4c8-b173-4fc4-8232-e13e3eac9dc3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.993572] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167105, 'name': ReconfigVM_Task, 'duration_secs': 0.635117} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.993925] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Reconfigured VM instance instance-0000003a to attach disk [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423/ab021831-2cc3-4457-aa55-b55036c2a423.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.997079] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77f7fba1-9aaa-4d3e-8e86-699dd24e0001 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.003711] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 857.003711] env[68569]: value = "task-3167107" [ 857.003711] env[68569]: _type = "Task" [ 857.003711] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.012536] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167107, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.064015] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfd0bf2-c25a-4771-95a6-a8b1a4fb6b90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.071223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ba7caf-bb6c-4128-b80a-17173fd23b4d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.103060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d3e625-0b0a-45b1-8799-02d20e365979 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.109766] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db252dd0-2cdb-4402-bd75-660bbbfd53d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.123080] env[68569]: DEBUG nova.compute.provider_tree [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.341747] env[68569]: DEBUG nova.network.neutron [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Successfully updated port: eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.376371] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c2d8d-0e09-cda3-8afd-f5f10cb19b98, 'name': SearchDatastore_Task, 'duration_secs': 0.053086} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.376633] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.376892] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. {{(pid=68569) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 857.377172] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6770f736-2578-4ee6-8823-9a989a86d410 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.384446] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 857.384446] env[68569]: value = "task-3167108" [ 857.384446] env[68569]: _type = "Task" [ 857.384446] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.392016] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167108, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.513414] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167107, 'name': Rename_Task, 'duration_secs': 0.399062} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.513639] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.513877] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87f18d64-c4ef-4df7-9ff8-7c6714feb299 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.519551] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 857.519551] env[68569]: value = "task-3167109" [ 857.519551] env[68569]: _type = "Task" [ 857.519551] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.526956] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167109, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.647651] env[68569]: ERROR nova.scheduler.client.report [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [req-3427c37b-6769-43fe-9e9e-c6ed0e9ec866] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3427c37b-6769-43fe-9e9e-c6ed0e9ec866"}]} [ 857.667287] env[68569]: DEBUG nova.scheduler.client.report [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 857.672241] env[68569]: DEBUG nova.compute.manager [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Received event network-vif-plugged-eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 857.672455] env[68569]: DEBUG oslo_concurrency.lockutils [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] Acquiring lock "a63b06a1-c24e-4013-a1f4-b227732a1e05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.672656] env[68569]: DEBUG oslo_concurrency.lockutils [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.672830] env[68569]: DEBUG oslo_concurrency.lockutils [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.673059] env[68569]: DEBUG nova.compute.manager [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] No waiting events found dispatching network-vif-plugged-eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 857.673308] env[68569]: WARNING nova.compute.manager [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Received unexpected event network-vif-plugged-eb7c5f24-ece0-4c55-86ec-3c90bc431594 for instance with vm_state building and task_state spawning. 
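The 409 above is placement's optimistic-concurrency check: the inventory PUT carried a stale resource provider generation, so the report client refreshes the provider's inventories, aggregates and traits and retries, which is what the surrounding entries show (the retried update lands and bumps the generation from 99 to 100). Below is a minimal, illustrative sketch of that refresh-and-retry pattern against a placement-like HTTP API; it is not Nova's report client, and the endpoint, microversion header and helper names are assumptions for the example only.

# Illustrative sketch of retrying an inventory update on a
# "placement.concurrent_update" generation conflict. PLACEMENT and HEADERS
# are hypothetical values, not taken from this deployment's config.
import requests

PLACEMENT = "http://placement.example:8778"               # assumed endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.26"}      # assumed microversion pin


def get_inventories(session: requests.Session, rp_uuid: str) -> dict:
    """Fetch the provider's current inventories plus its generation."""
    resp = session.get(
        f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories", headers=HEADERS)
    resp.raise_for_status()
    # Shape: {"resource_provider_generation": N, "inventories": {...}}
    return resp.json()


def set_inventories(session: requests.Session, rp_uuid: str,
                    inventories: dict, retries: int = 3) -> dict:
    """PUT the desired inventory, re-reading the generation on each 409."""
    generation = get_inventories(session, rp_uuid)["resource_provider_generation"]
    for _ in range(retries):
        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        resp = session.put(
            f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Another writer bumped the generation (concurrent update); refresh
        # our view of the provider and try again with the new generation.
        generation = get_inventories(session, rp_uuid)["resource_provider_generation"]
    raise RuntimeError(f"gave up updating inventory for {rp_uuid} after {retries} tries")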
[ 857.673468] env[68569]: DEBUG nova.compute.manager [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Received event network-changed-eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 857.673577] env[68569]: DEBUG nova.compute.manager [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Refreshing instance network info cache due to event network-changed-eb7c5f24-ece0-4c55-86ec-3c90bc431594. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 857.673722] env[68569]: DEBUG oslo_concurrency.lockutils [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] Acquiring lock "refresh_cache-a63b06a1-c24e-4013-a1f4-b227732a1e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.673928] env[68569]: DEBUG oslo_concurrency.lockutils [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] Acquired lock "refresh_cache-a63b06a1-c24e-4013-a1f4-b227732a1e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.673988] env[68569]: DEBUG nova.network.neutron [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Refreshing network info cache for port eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 857.683049] env[68569]: DEBUG nova.scheduler.client.report [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 857.683302] env[68569]: DEBUG nova.compute.provider_tree [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.700750] env[68569]: DEBUG nova.scheduler.client.report [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Refreshing aggregate associations for resource provider 
a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 857.721373] env[68569]: DEBUG nova.scheduler.client.report [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 857.845211] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "refresh_cache-a63b06a1-c24e-4013-a1f4-b227732a1e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.896796] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504942} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.897108] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. [ 857.897883] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4d1887-7276-44d5-a39d-e57e44651235 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.925546] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.928053] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0d2c457-b1e3-48a3-a1d0-247b84041209 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.946402] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 857.946402] env[68569]: value = "task-3167110" [ 857.946402] env[68569]: _type = "Task" [ 857.946402] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.957801] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.029755] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167109, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.137275] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9396a37d-691b-4427-ab05-2654cea91b62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.145144] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6894e9-c257-4a3f-8e63-2b89e4ff4c61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.184847] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd128e45-e4fa-476e-9af5-6292d8124c64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.193364] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c89edbc-40b9-4fa0-86e3-38c9e3bdd0ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.209867] env[68569]: DEBUG nova.compute.provider_tree [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 858.227806] env[68569]: DEBUG nova.network.neutron [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.323926] env[68569]: DEBUG nova.network.neutron [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.459179] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167110, 'name': ReconfigVM_Task, 'duration_secs': 0.295392} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.459497] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 123a6895-af16-493a-afce-7ae6c2137422/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.460729] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f574fd-9c3b-41b4-a63f-dc3c60e5f8c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.488032] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af926cc2-fc6c-43fd-80e9-b69eb697da98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.502288] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 858.502288] env[68569]: value = "task-3167111" [ 858.502288] env[68569]: _type = "Task" [ 858.502288] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.509759] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167111, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.528468] env[68569]: DEBUG oslo_vmware.api [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167109, 'name': PowerOnVM_Task, 'duration_secs': 0.669847} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.528741] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.528988] env[68569]: DEBUG nova.compute.manager [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.529765] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ab59ab-c2c1-4ee8-b218-d95321ea8084 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.741565] env[68569]: DEBUG nova.scheduler.client.report [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 858.741826] env[68569]: DEBUG nova.compute.provider_tree [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 99 to 100 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 858.742019] env[68569]: DEBUG nova.compute.provider_tree [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 858.827898] env[68569]: DEBUG oslo_concurrency.lockutils [req-12e68ad3-9010-4067-892d-a255373e4b41 req-6318a710-7a3b-4fde-8992-77aeb990c600 service nova] Releasing lock "refresh_cache-a63b06a1-c24e-4013-a1f4-b227732a1e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.828318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 
tempest-ServerMetadataTestJSON-269589289-project-member] Acquired lock "refresh_cache-a63b06a1-c24e-4013-a1f4-b227732a1e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.828481] env[68569]: DEBUG nova.network.neutron [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.011878] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167111, 'name': ReconfigVM_Task, 'duration_secs': 0.145874} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.012163] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.012407] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13471fb5-675e-485e-81c6-ccd37c818fc2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.018667] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 859.018667] env[68569]: value = "task-3167112" [ 859.018667] env[68569]: _type = "Task" [ 859.018667] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.032366] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167112, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.046654] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.247120] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.552s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.247795] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 859.251022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.142s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.251287] env[68569]: DEBUG nova.objects.instance [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] [instance: 50abc994-682a-40d6-ae77-601839b98793] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 859.487101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "ab021831-2cc3-4457-aa55-b55036c2a423" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.487101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "ab021831-2cc3-4457-aa55-b55036c2a423" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.487101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "ab021831-2cc3-4457-aa55-b55036c2a423-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
859.487748] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "ab021831-2cc3-4457-aa55-b55036c2a423-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.488115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "ab021831-2cc3-4457-aa55-b55036c2a423-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.490414] env[68569]: INFO nova.compute.manager [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Terminating instance [ 859.530347] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167112, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.605019] env[68569]: DEBUG nova.network.neutron [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.757767] env[68569]: DEBUG nova.compute.utils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 859.762393] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 859.762393] env[68569]: DEBUG nova.network.neutron [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.797951] env[68569]: DEBUG nova.network.neutron [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Updating instance_info_cache with network_info: [{"id": "eb7c5f24-ece0-4c55-86ec-3c90bc431594", "address": "fa:16:3e:40:44:5b", "network": {"id": "b47c368e-fa0c-432e-afd1-5161f07e6a5f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1694204290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "625a1b6fb894446f82d06e9abdea28ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb7c5f24-ec", "ovs_interfaceid": "eb7c5f24-ece0-4c55-86ec-3c90bc431594", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.818582] env[68569]: DEBUG nova.policy [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 859.931048] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "c0211ed8-5995-48f4-b339-99bd4c93254c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.931048] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.995974] env[68569]: DEBUG nova.compute.manager [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 859.995974] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 859.997053] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ad862d-8326-4db2-8b29-c715ff39b8ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.008828] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 860.009250] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c98afc50-fcd3-49e2-b35c-685891b72d4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.017784] env[68569]: DEBUG oslo_vmware.api [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 860.017784] env[68569]: value = "task-3167113" [ 860.017784] env[68569]: _type = "Task" [ 860.017784] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.034235] env[68569]: DEBUG oslo_vmware.api [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167113, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.040207] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167112, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.186955] env[68569]: DEBUG nova.network.neutron [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Successfully created port: 324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.263029] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 860.266664] env[68569]: DEBUG oslo_concurrency.lockutils [None req-361bcfac-711f-4c27-9ebc-8f8775c984d0 tempest-ServersAdmin275Test-562968262 tempest-ServersAdmin275Test-562968262-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.267806] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.787s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.269180] env[68569]: INFO nova.compute.claims [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.300370] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Releasing lock "refresh_cache-a63b06a1-c24e-4013-a1f4-b227732a1e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.300551] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Instance network_info: |[{"id": "eb7c5f24-ece0-4c55-86ec-3c90bc431594", "address": "fa:16:3e:40:44:5b", "network": {"id": "b47c368e-fa0c-432e-afd1-5161f07e6a5f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1694204290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "625a1b6fb894446f82d06e9abdea28ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", 
"external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb7c5f24-ec", "ovs_interfaceid": "eb7c5f24-ece0-4c55-86ec-3c90bc431594", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 860.300886] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:44:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb7c5f24-ece0-4c55-86ec-3c90bc431594', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.314512] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Creating folder: Project (625a1b6fb894446f82d06e9abdea28ea). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.316293] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c748a2dd-ba09-4694-92c7-875fa7dbd4b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.329922] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Created folder: Project (625a1b6fb894446f82d06e9abdea28ea) in parent group-v633430. [ 860.329922] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Creating folder: Instances. Parent ref: group-v633606. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.329922] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f31bdb49-38f0-4d87-a7b0-bd2a14fec8f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.338294] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Created folder: Instances in parent group-v633606. [ 860.338524] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 860.338712] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.338918] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daceb97b-9519-4eb8-b280-cb199014646d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.359868] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.359868] env[68569]: value = "task-3167116" [ 860.359868] env[68569]: _type = "Task" [ 860.359868] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.369871] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167116, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.536897] env[68569]: DEBUG oslo_vmware.api [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167113, 'name': PowerOffVM_Task, 'duration_secs': 0.211703} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.541643] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.541955] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 860.542375] env[68569]: DEBUG oslo_vmware.api [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167112, 'name': PowerOnVM_Task, 'duration_secs': 1.063883} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.542672] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1af2c402-42fb-4ad5-af53-cd3c4cc37bf9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.544968] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.548876] env[68569]: DEBUG nova.compute.manager [None req-5f8ebc75-8157-41d7-ab4e-9e770fa97dc3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 860.549855] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04196d3a-451c-4c07-a3ce-414ee87fcd1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.607085] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 860.607085] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 860.607085] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore1] ab021831-2cc3-4457-aa55-b55036c2a423 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.607085] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf9f582f-ede2-4561-9c66-16869ee3cd8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.615151] env[68569]: DEBUG oslo_vmware.api [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 860.615151] env[68569]: value = "task-3167118" [ 860.615151] env[68569]: _type = "Task" [ 860.615151] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.623794] env[68569]: DEBUG oslo_vmware.api [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167118, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.872012] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167116, 'name': CreateVM_Task, 'duration_secs': 0.361378} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.872012] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.872632] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.872847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.873117] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 860.873408] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cf9aa5d-2f13-4f7b-92ef-f0ce1a888759 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.878948] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 860.878948] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525eba04-a6a1-0dbb-3273-9b22b2224416" [ 860.878948] env[68569]: _type = "Task" [ 860.878948] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.887889] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525eba04-a6a1-0dbb-3273-9b22b2224416, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.125734] env[68569]: DEBUG oslo_vmware.api [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326375} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.125938] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.126138] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 861.126319] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 861.126492] env[68569]: INFO nova.compute.manager [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Took 1.13 seconds to destroy the instance on the hypervisor. [ 861.126773] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 861.126977] env[68569]: DEBUG nova.compute.manager [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 861.127088] env[68569]: DEBUG nova.network.neutron [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 861.276695] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 861.301960] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 861.302218] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.302384] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.302598] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.302739] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.302883] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 861.303097] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 861.303253] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 861.303423] env[68569]: DEBUG 
nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 861.303582] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 861.303751] env[68569]: DEBUG nova.virt.hardware [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 861.304695] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63026ace-1080-4104-abd8-ef3aa0d91ec7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.315397] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aec6b9a-9eba-4cf6-867f-00e7586b1b0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.385861] env[68569]: DEBUG nova.compute.manager [req-4c7f41dc-0eb7-4a93-81f6-30b41905ea75 req-49581554-da35-4e33-a5b0-94bc9e86e263 service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Received event network-vif-deleted-3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 861.386468] env[68569]: INFO nova.compute.manager [req-4c7f41dc-0eb7-4a93-81f6-30b41905ea75 req-49581554-da35-4e33-a5b0-94bc9e86e263 service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Neutron deleted interface 3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe; detaching it from the instance and deleting it from the info cache [ 861.386468] env[68569]: DEBUG nova.network.neutron [req-4c7f41dc-0eb7-4a93-81f6-30b41905ea75 req-49581554-da35-4e33-a5b0-94bc9e86e263 service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.393515] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525eba04-a6a1-0dbb-3273-9b22b2224416, 'name': SearchDatastore_Task, 'duration_secs': 0.010349} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.396195] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.396432] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.396666] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.396811] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.396990] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.397681] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e0768cd-67a0-45d0-8645-abe269a29d49 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.406882] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.407124] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.410186] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ba3aa6-dfd2-4fba-815c-cd4d713568f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.417029] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 861.417029] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5204f149-bc34-4f84-7b62-e6354f1ce12d" [ 861.417029] env[68569]: _type = "Task" [ 861.417029] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.428694] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5204f149-bc34-4f84-7b62-e6354f1ce12d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.733591] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391efb91-b969-425e-9eff-de0a3614554f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.742100] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b850c2-f0fb-4089-b6c0-13a4ab058601 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.775625] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c4d568-b555-4916-9052-fb42f37ae96a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.783387] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7c84b4-90c7-484b-b428-a77f8ae9f5da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.797550] env[68569]: DEBUG nova.compute.provider_tree [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 861.867412] env[68569]: DEBUG nova.network.neutron [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.888846] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-0395f708-fcee-4b82-9c8c-8d596a8ac289 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.899168] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e98dbc-6118-4b4a-9ac4-8850d99b0c70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.912524] env[68569]: DEBUG nova.compute.manager [req-27ff8d6a-fac8-41a2-adec-88243b34ae84 req-0b3b9740-909e-4e26-badf-657c681c6b17 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-vif-plugged-324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 861.912816] env[68569]: DEBUG oslo_concurrency.lockutils [req-27ff8d6a-fac8-41a2-adec-88243b34ae84 req-0b3b9740-909e-4e26-badf-657c681c6b17 service nova] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.913153] env[68569]: DEBUG oslo_concurrency.lockutils [req-27ff8d6a-fac8-41a2-adec-88243b34ae84 req-0b3b9740-909e-4e26-badf-657c681c6b17 service nova] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.913423] env[68569]: DEBUG oslo_concurrency.lockutils [req-27ff8d6a-fac8-41a2-adec-88243b34ae84 req-0b3b9740-909e-4e26-badf-657c681c6b17 service nova] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.913658] env[68569]: DEBUG nova.compute.manager [req-27ff8d6a-fac8-41a2-adec-88243b34ae84 req-0b3b9740-909e-4e26-badf-657c681c6b17 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] No waiting events found dispatching network-vif-plugged-324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 861.913902] env[68569]: WARNING nova.compute.manager [req-27ff8d6a-fac8-41a2-adec-88243b34ae84 req-0b3b9740-909e-4e26-badf-657c681c6b17 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received unexpected event network-vif-plugged-324818fd-bb45-4575-9e4a-bb6516576dde for instance with vm_state building and task_state spawning. [ 861.936390] env[68569]: DEBUG nova.network.neutron [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Successfully updated port: 324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.937997] env[68569]: DEBUG nova.compute.manager [req-4c7f41dc-0eb7-4a93-81f6-30b41905ea75 req-49581554-da35-4e33-a5b0-94bc9e86e263 service nova] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Detach interface failed, port_id=3aef6e13-a8d2-4dbc-a3c8-5ea5ed9ef9fe, reason: Instance ab021831-2cc3-4457-aa55-b55036c2a423 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 861.945178] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5204f149-bc34-4f84-7b62-e6354f1ce12d, 'name': SearchDatastore_Task, 'duration_secs': 0.020788} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.946035] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33e0273e-c94a-4c4b-b9cd-e4dd813de5ba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.952039] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 861.952039] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b74afd-6163-7316-f817-bc0d3d8fea8d" [ 861.952039] env[68569]: _type = "Task" [ 861.952039] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.960777] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b74afd-6163-7316-f817-bc0d3d8fea8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.320649] env[68569]: ERROR nova.scheduler.client.report [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [req-5e8e69ba-f333-41de-885e-4b121f5498ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e8e69ba-f333-41de-885e-4b121f5498ad"}]} [ 862.334032] env[68569]: DEBUG nova.scheduler.client.report [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 862.348847] env[68569]: DEBUG nova.scheduler.client.report [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 862.349145] env[68569]: DEBUG nova.compute.provider_tree [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.361724] env[68569]: DEBUG nova.scheduler.client.report [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 862.372328] env[68569]: INFO nova.compute.manager [-] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Took 1.25 seconds to deallocate network for instance. 
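Editor's note: the req-5e8e69ba trace above shows the scheduler report client PUTting inventory to Placement with a stale resource provider generation, getting back 409 "placement.concurrent_update", and then refreshing the provider's inventories and aggregates before retrying (the provider generation moves from 102 to 103 later in this section). The sketch below illustrates that refresh-and-retry pattern against the Placement REST endpoints using plain requests; the endpoint URL, token, and retry limit are placeholder assumptions for illustration, and this is not Nova's actual report-client code.

    # Sketch: update a resource provider's inventory in Placement, retrying when
    # the PUT is rejected with 409 placement.concurrent_update because our cached
    # resource_provider_generation is stale (as in req-5e8e69ba above).
    # PLACEMENT_URL and the token are hypothetical placeholders.
    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # assumed endpoint
    HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",               # assumed auth token
               "Accept": "application/json"}

    def set_inventory(rp_uuid, inventories, max_retries=3):
        # inventories has the same shape as in the log, e.g.
        # {"VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
        #           "step_size": 1, "allocation_ratio": 4.0}, ...}
        url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            # Refresh the current generation before each attempt.
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()          # success: body carries the new generation
            if resp.status_code == 409:     # placement.concurrent_update
                continue                    # another writer bumped the generation; retry
            resp.raise_for_status()
        raise RuntimeError("inventory update kept conflicting; giving up")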
[ 862.378317] env[68569]: DEBUG nova.scheduler.client.report [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 862.438854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.439019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.439167] env[68569]: DEBUG nova.network.neutron [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.464950] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b74afd-6163-7316-f817-bc0d3d8fea8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009421} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.465482] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.465742] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a63b06a1-c24e-4013-a1f4-b227732a1e05/a63b06a1-c24e-4013-a1f4-b227732a1e05.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.465999] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-417f6995-9816-4cad-ba0f-ea3e5d5d7186 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.475029] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 862.475029] env[68569]: value = "task-3167119" [ 862.475029] env[68569]: _type = "Task" [ 862.475029] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.480896] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167119, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.593639] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "7696390d-a097-4b6d-827d-92f3165a4188" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.593889] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "7696390d-a097-4b6d-827d-92f3165a4188" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.790552] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062fa306-c88e-486c-b089-438d6a4c7e29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.800505] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f10b5a5-f0d8-42f0-bd8b-f302e0770c7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.843488] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568568cf-285f-45bd-a7c0-8fdf1957fe06 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.851800] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cc93b5-a84c-433d-961d-12867dd7a2e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.866192] env[68569]: DEBUG nova.compute.provider_tree [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.881880] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.970378] env[68569]: DEBUG nova.network.neutron [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Instance 
cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.983553] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167119, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450829} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.983825] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a63b06a1-c24e-4013-a1f4-b227732a1e05/a63b06a1-c24e-4013-a1f4-b227732a1e05.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 862.984050] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 862.984303] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e773a467-d793-412e-8607-f8b5cc5ee7fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.990902] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 862.990902] env[68569]: value = "task-3167120" [ 862.990902] env[68569]: _type = "Task" [ 862.990902] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.999891] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167120, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.126460] env[68569]: DEBUG nova.network.neutron [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.401983] env[68569]: DEBUG nova.scheduler.client.report [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 863.402129] env[68569]: DEBUG nova.compute.provider_tree [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 102 to 103 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 863.402204] env[68569]: DEBUG nova.compute.provider_tree [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.500756] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.275718} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.501068] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.501835] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbee7ed-2758-4f1f-8a2c-556781c01c70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.524960] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] a63b06a1-c24e-4013-a1f4-b227732a1e05/a63b06a1-c24e-4013-a1f4-b227732a1e05.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.525223] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1831e20-4c0d-488b-8b1e-364b8f5ed2c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.543488] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 863.543488] env[68569]: value = "task-3167121" [ 863.543488] env[68569]: _type = "Task" [ 863.543488] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.551194] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167121, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.629470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.629834] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Instance network_info: |[{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 863.630243] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:19:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '324818fd-bb45-4575-9e4a-bb6516576dde', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.637845] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Creating folder: Project (7613bcf5361d4b08a8d864e59b7fe858). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.638143] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f85787b0-39dd-4315-b295-724be502aa88 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.647844] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Created folder: Project (7613bcf5361d4b08a8d864e59b7fe858) in parent group-v633430. [ 863.647995] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Creating folder: Instances. Parent ref: group-v633609. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.648236] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c086262f-a0a2-4463-8f30-5be1cd99ce25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.656544] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Created folder: Instances in parent group-v633609. [ 863.656761] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.656935] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.657133] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6027495-46fc-4b34-8e78-ad5dd24e3720 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.676749] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.676749] env[68569]: value = "task-3167124" [ 863.676749] env[68569]: _type = "Task" [ 863.676749] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.684202] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167124, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.907365] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.639s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.908021] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 863.910783] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.290s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.912308] env[68569]: INFO nova.compute.claims [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.943416] env[68569]: DEBUG nova.compute.manager [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-changed-324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 863.943624] env[68569]: DEBUG nova.compute.manager [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Refreshing instance network info cache due to event network-changed-324818fd-bb45-4575-9e4a-bb6516576dde. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 863.944160] env[68569]: DEBUG oslo_concurrency.lockutils [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] Acquiring lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.944160] env[68569]: DEBUG oslo_concurrency.lockutils [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] Acquired lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.944160] env[68569]: DEBUG nova.network.neutron [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Refreshing network info cache for port 324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.052860] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167121, 'name': ReconfigVM_Task, 'duration_secs': 0.330081} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.053168] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Reconfigured VM instance instance-0000003d to attach disk [datastore1] a63b06a1-c24e-4013-a1f4-b227732a1e05/a63b06a1-c24e-4013-a1f4-b227732a1e05.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.053868] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-201d392a-29a7-4858-8725-677d9e97f20b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.059343] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 864.059343] env[68569]: value = "task-3167125" [ 864.059343] env[68569]: _type = "Task" [ 864.059343] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.066831] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167125, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.187117] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167124, 'name': CreateVM_Task, 'duration_secs': 0.368943} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.187289] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.188084] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.188166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.189020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 864.189020] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ac54be7-e68f-4ab2-a07f-89817c9cf647 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.193146] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 864.193146] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5226e8e1-c310-6a0e-ccc3-66517d5e42e2" [ 864.193146] env[68569]: _type = "Task" [ 864.193146] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.200794] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5226e8e1-c310-6a0e-ccc3-66517d5e42e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.419396] env[68569]: DEBUG nova.compute.utils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.420608] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.420772] env[68569]: DEBUG nova.network.neutron [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.464029] env[68569]: DEBUG nova.policy [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e6bccdef3b8466cb4feae0b9cbddcd6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3098015751d54ac28e7171fc2948da9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.572337] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167125, 'name': Rename_Task, 'duration_secs': 0.182861} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.573089] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.573089] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4a58978-2e63-465e-95ea-bbbef74ba6c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.579685] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 864.579685] env[68569]: value = "task-3167126" [ 864.579685] env[68569]: _type = "Task" [ 864.579685] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.588373] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167126, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.664589] env[68569]: DEBUG nova.network.neutron [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updated VIF entry in instance network info cache for port 324818fd-bb45-4575-9e4a-bb6516576dde. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 864.664589] env[68569]: DEBUG nova.network.neutron [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.711602] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5226e8e1-c310-6a0e-ccc3-66517d5e42e2, 'name': SearchDatastore_Task, 'duration_secs': 0.011786} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.711955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.712210] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.712447] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.712594] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.712774] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.713223] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ad7c13c-2270-4b3f-b9c7-38f5f16842e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.724321] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.724321] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.725029] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03789108-8647-428f-b91b-bc453c31dffe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.729885] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 864.729885] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52082035-6ee5-6821-0a1a-feac79d30943" [ 864.729885] env[68569]: _type = "Task" [ 864.729885] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.741119] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52082035-6ee5-6821-0a1a-feac79d30943, 'name': SearchDatastore_Task, 'duration_secs': 0.007963} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.741665] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7286d55e-540c-4c83-bd9b-91b8112dd2e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.746899] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 864.746899] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5274ff21-f5bd-490d-7175-feed33cad14a" [ 864.746899] env[68569]: _type = "Task" [ 864.746899] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.754710] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5274ff21-f5bd-490d-7175-feed33cad14a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.763583] env[68569]: DEBUG nova.network.neutron [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Successfully created port: 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.926552] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 865.091552] env[68569]: DEBUG oslo_vmware.api [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167126, 'name': PowerOnVM_Task, 'duration_secs': 0.4133} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.091820] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.092155] env[68569]: INFO nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Took 8.38 seconds to spawn the instance on the hypervisor. [ 865.092421] env[68569]: DEBUG nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.093234] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b8f259-5d8b-49a6-9950-1bf55e569e53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.169732] env[68569]: DEBUG oslo_concurrency.lockutils [req-7fb52169-ca5a-47c8-99f1-89984e3feb85 req-fe27a26a-64de-4be6-b6aa-49213a7b0cd5 service nova] Releasing lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.259774] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5274ff21-f5bd-490d-7175-feed33cad14a, 'name': SearchDatastore_Task, 'duration_secs': 0.0094} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.260867] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.261147] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6824efd5-427b-420d-83d5-a1d5acd94bf9/6824efd5-427b-420d-83d5-a1d5acd94bf9.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.261411] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48d2b414-3cfd-4b20-aa5e-94558c9ec947 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.270414] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 865.270414] env[68569]: value = "task-3167127" [ 865.270414] env[68569]: _type = "Task" [ 865.270414] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.280339] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167127, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.344596] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44764567-a1a3-4390-97ff-73045cf67680 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.351644] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a2c7c8-f2a0-49ba-9510-32de0a39b02c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.383199] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f309891b-8168-42d9-8f23-9bcd172daff6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.390585] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a49dd8-0199-475f-b53f-6d6d4ca5843b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.406254] env[68569]: DEBUG nova.compute.provider_tree [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.613224] env[68569]: INFO nova.compute.manager [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Took 59.06 seconds to build instance. [ 865.780424] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167127, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.938232] env[68569]: DEBUG nova.scheduler.client.report [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 865.938540] env[68569]: DEBUG nova.compute.provider_tree [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 103 to 104 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 865.938734] env[68569]: DEBUG nova.compute.provider_tree [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.942938] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.968931] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.969216] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.969373] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.969550] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.969693] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.969836] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.970047] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.970236] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.970426] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.970587] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.970761] env[68569]: DEBUG nova.virt.hardware [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.971658] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2de1a9-8d50-49e8-af78-840c763802e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.980370] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067915e2-6ea1-4ac7-9eb4-25fdfc9f3c34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.117455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-949a52ca-24ee-4884-8fdf-522d5b0e4e06 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.515s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.184785] env[68569]: DEBUG nova.compute.manager [req-7b9feae9-0d50-412a-8cea-db830bfb845e req-caefd26b-b3d4-4ab4-ae0b-35a090f1f16f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-vif-plugged-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 866.185800] env[68569]: DEBUG oslo_concurrency.lockutils [req-7b9feae9-0d50-412a-8cea-db830bfb845e req-caefd26b-b3d4-4ab4-ae0b-35a090f1f16f service nova] Acquiring lock "f84204a9-aeea-498e-9682-298e581b34e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.185979] env[68569]: DEBUG oslo_concurrency.lockutils [req-7b9feae9-0d50-412a-8cea-db830bfb845e req-caefd26b-b3d4-4ab4-ae0b-35a090f1f16f service nova] Lock "f84204a9-aeea-498e-9682-298e581b34e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.186166] env[68569]: DEBUG oslo_concurrency.lockutils [req-7b9feae9-0d50-412a-8cea-db830bfb845e req-caefd26b-b3d4-4ab4-ae0b-35a090f1f16f service nova] Lock "f84204a9-aeea-498e-9682-298e581b34e3-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.186632] env[68569]: DEBUG nova.compute.manager [req-7b9feae9-0d50-412a-8cea-db830bfb845e req-caefd26b-b3d4-4ab4-ae0b-35a090f1f16f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] No waiting events found dispatching network-vif-plugged-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.186632] env[68569]: WARNING nova.compute.manager [req-7b9feae9-0d50-412a-8cea-db830bfb845e req-caefd26b-b3d4-4ab4-ae0b-35a090f1f16f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received unexpected event network-vif-plugged-471748e7-587f-46c4-b8fd-1b7c1700ca69 for instance with vm_state building and task_state spawning. [ 866.281381] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541654} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.281694] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6824efd5-427b-420d-83d5-a1d5acd94bf9/6824efd5-427b-420d-83d5-a1d5acd94bf9.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.281923] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.282380] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d75f084c-3674-42a9-96ad-a685f988e60a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.290092] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 866.290092] env[68569]: value = "task-3167128" [ 866.290092] env[68569]: _type = "Task" [ 866.290092] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.298015] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167128, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.332861] env[68569]: DEBUG nova.network.neutron [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Successfully updated port: 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.446767] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.447339] env[68569]: DEBUG nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 866.449873] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.816s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.450104] env[68569]: DEBUG nova.objects.instance [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lazy-loading 'resources' on Instance uuid 50abc994-682a-40d6-ae77-601839b98793 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.620051] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 866.800200] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070021} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.800478] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.801438] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1174930c-8292-45a9-aa61-68b9205b7b1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.824124] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 6824efd5-427b-420d-83d5-a1d5acd94bf9/6824efd5-427b-420d-83d5-a1d5acd94bf9.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.824434] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebb439e1-c0da-40a3-a403-efc0ccb2e017 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.838874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.838981] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.839147] env[68569]: DEBUG nova.network.neutron [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.848040] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 866.848040] env[68569]: value = "task-3167129" [ 866.848040] env[68569]: _type = "Task" [ 866.848040] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.855745] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167129, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.957358] env[68569]: DEBUG nova.compute.utils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 866.959210] env[68569]: DEBUG nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 866.959375] env[68569]: DEBUG nova.network.neutron [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 866.995945] env[68569]: DEBUG nova.policy [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20da20306b634a69877c972d9f8ecf8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc349e5e6d3b4ad39e917625b5078597', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 867.138789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.264342] env[68569]: DEBUG nova.network.neutron [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Successfully created port: 289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.352937] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2458f9a-31a3-47f1-a31e-90228d68404a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.361691] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167129, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.365541] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4de820c-ccf5-4e0e-99a1-43352d238c90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.397233] env[68569]: DEBUG nova.network.neutron [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.400052] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c32eaf9-20e8-4356-95c0-c9c3c6a18afe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.407799] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdc6612-0d42-43c5-95ae-2deaa3811bfb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.423703] env[68569]: DEBUG nova.compute.provider_tree [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.462344] env[68569]: DEBUG nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 867.613510] env[68569]: DEBUG nova.network.neutron [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.858325] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167129, 'name': ReconfigVM_Task, 'duration_secs': 0.756819} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.858645] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 6824efd5-427b-420d-83d5-a1d5acd94bf9/6824efd5-427b-420d-83d5-a1d5acd94bf9.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.859294] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3e8f95a-7c23-4087-ac40-2e801b394a1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.866134] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 867.866134] env[68569]: value = "task-3167130" [ 867.866134] env[68569]: _type = "Task" [ 867.866134] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.878607] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167130, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.930166] env[68569]: DEBUG nova.scheduler.client.report [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 868.115791] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.116233] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Instance network_info: |[{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 868.116626] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:4c:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '471748e7-587f-46c4-b8fd-1b7c1700ca69', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 868.124208] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Creating folder: Project (3098015751d54ac28e7171fc2948da9c). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 868.124345] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9dd4bb31-c265-41ac-8af4-b0f85a9b4317 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.135558] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Created folder: Project (3098015751d54ac28e7171fc2948da9c) in parent group-v633430. [ 868.135798] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Creating folder: Instances. Parent ref: group-v633612. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 868.136051] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7b6daa3-d221-495e-9543-c777ffdb9201 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.145459] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Created folder: Instances in parent group-v633612. [ 868.145678] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 868.145882] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 868.146092] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-963d6588-405b-42c0-bb26-e7e8d550b0f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.164653] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.164653] env[68569]: value = "task-3167133" [ 868.164653] env[68569]: _type = "Task" [ 868.164653] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.173514] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167133, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.217557] env[68569]: DEBUG nova.compute.manager [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 868.217758] env[68569]: DEBUG nova.compute.manager [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing instance network info cache due to event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 868.217972] env[68569]: DEBUG oslo_concurrency.lockutils [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] Acquiring lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.218127] env[68569]: DEBUG oslo_concurrency.lockutils [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.218306] env[68569]: DEBUG nova.network.neutron [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.340884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "a63b06a1-c24e-4013-a1f4-b227732a1e05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.341251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.341531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "a63b06a1-c24e-4013-a1f4-b227732a1e05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.341820] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.341977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.344206] env[68569]: INFO nova.compute.manager [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Terminating instance [ 868.376795] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167130, 'name': Rename_Task, 'duration_secs': 0.398262} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.377097] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.377356] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0f18ade-3b96-4df8-a2ea-b444b99915be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.383894] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 868.383894] env[68569]: value = "task-3167134" [ 868.383894] env[68569]: _type = "Task" [ 868.383894] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.393164] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167134, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.437757] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.987s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.439252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.892s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.439852] env[68569]: DEBUG nova.objects.instance [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lazy-loading 'resources' on Instance uuid 9eafa273-097b-48ac-ae5f-4f7a469ac861 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.462642] env[68569]: INFO nova.scheduler.client.report [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Deleted allocations for instance 50abc994-682a-40d6-ae77-601839b98793 [ 868.472040] env[68569]: DEBUG nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 868.501547] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 868.501894] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.502187] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 868.502495] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.502674] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 868.502831] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 868.503055] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 868.503289] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 868.503464] env[68569]: DEBUG nova.virt.hardware [None 
req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 868.503696] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 868.503882] env[68569]: DEBUG nova.virt.hardware [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 868.504840] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53205bd3-7a73-43ac-b4ab-f70f99b92778 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.513402] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d88ad88-3cba-4f22-840d-a65cd470dcc8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.675043] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167133, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.849420] env[68569]: DEBUG nova.compute.manager [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.849420] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.850796] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbbe0ef-2e45-4b49-921b-9f465618a479 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.862322] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.862826] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-445fb12a-ba05-46a7-a015-fab23f01a073 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.869535] env[68569]: DEBUG oslo_vmware.api [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 868.869535] env[68569]: value = "task-3167135" [ 868.869535] env[68569]: _type = "Task" [ 868.869535] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.879982] env[68569]: DEBUG oslo_vmware.api [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.892037] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167134, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.934925] env[68569]: DEBUG nova.network.neutron [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Successfully updated port: 289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.971671] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ec8ea187-62e3-487a-bb82-bf3f9467efe1 tempest-ServersAdmin275Test-1333460505 tempest-ServersAdmin275Test-1333460505-project-member] Lock "50abc994-682a-40d6-ae77-601839b98793" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.627s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.974604] env[68569]: DEBUG nova.network.neutron [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updated VIF entry in instance network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.975103] env[68569]: DEBUG nova.network.neutron [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.176225] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167133, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.371226] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734602f5-1df9-4476-9800-5f752644e7f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.384476] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a69fa7-1312-49bf-8e38-35419fb9f700 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.387518] env[68569]: DEBUG oslo_vmware.api [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167135, 'name': PowerOffVM_Task, 'duration_secs': 0.175589} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.387842] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.388015] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.391173] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0eef5d71-bffd-4a1b-bd96-bd2616d22d1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.422089] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108db48b-3784-4c6f-9fa3-e846a2c34b48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.424737] env[68569]: DEBUG oslo_vmware.api [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167134, 'name': PowerOnVM_Task, 'duration_secs': 0.757313} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.424983] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.425195] env[68569]: INFO nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Took 8.15 seconds to spawn the instance on the hypervisor. 
[ 869.425368] env[68569]: DEBUG nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.426437] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b677744-2299-4154-a518-4c972e743be5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.431984] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df13cc38-b2ef-45d7-a0f6-69f0c39ed981 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.436959] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "refresh_cache-dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.437111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquired lock "refresh_cache-dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.437256] env[68569]: DEBUG nova.network.neutron [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.451908] env[68569]: DEBUG nova.compute.provider_tree [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.457823] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.458061] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.458250] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Deleting the datastore file [datastore1] a63b06a1-c24e-4013-a1f4-b227732a1e05 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.458736] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdfcb88c-c91e-4c7a-88a7-02f693e29dc6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.465106] env[68569]: DEBUG oslo_vmware.api [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for the task: (returnval){ [ 869.465106] env[68569]: value = "task-3167137" [ 869.465106] env[68569]: _type = "Task" [ 869.465106] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.474226] env[68569]: DEBUG oslo_vmware.api [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167137, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.477879] env[68569]: DEBUG oslo_concurrency.lockutils [req-724de9aa-b42d-428d-ab52-e18c27f3a78b req-ad321ede-a3a2-4cf2-a90f-5bb2bbcbc5fc service nova] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.677649] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167133, 'name': CreateVM_Task, 'duration_secs': 1.226419} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.677877] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.679539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.679744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.680151] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 869.680437] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-101c4cd5-9271-463f-8120-6cd64c55f0ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.685345] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 
tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 869.685345] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5262e30d-10de-23c7-be88-73c7a8eaa1d2" [ 869.685345] env[68569]: _type = "Task" [ 869.685345] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.693656] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5262e30d-10de-23c7-be88-73c7a8eaa1d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.954573] env[68569]: INFO nova.compute.manager [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Took 52.77 seconds to build instance. [ 869.955834] env[68569]: DEBUG nova.scheduler.client.report [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.978939] env[68569]: DEBUG oslo_vmware.api [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Task: {'id': task-3167137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.405531} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.978939] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.978939] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.978939] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.978939] env[68569]: INFO nova.compute.manager [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Took 1.13 seconds to destroy the instance on the hypervisor. [ 869.979242] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.979647] env[68569]: DEBUG nova.compute.manager [-] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.979786] env[68569]: DEBUG nova.network.neutron [-] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.984034] env[68569]: DEBUG nova.network.neutron [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.147309] env[68569]: DEBUG nova.network.neutron [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Updating instance_info_cache with network_info: [{"id": "289a9512-486e-4b33-bb13-4b51795aaf2b", "address": "fa:16:3e:2b:11:5c", "network": {"id": "5a057e29-5621-4cc2-b42d-dc78c7d0eac1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1694431738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc349e5e6d3b4ad39e917625b5078597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289a9512-48", "ovs_interfaceid": "289a9512-486e-4b33-bb13-4b51795aaf2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.198637] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5262e30d-10de-23c7-be88-73c7a8eaa1d2, 'name': SearchDatastore_Task, 'duration_secs': 0.010612} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.199024] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.199198] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.199433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.199580] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.199765] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.200111] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b62b686-a7e5-4885-b7e7-c463501defd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.209147] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.209357] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.210161] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ff21b04-af2b-45b2-9f2f-b339113a8aed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.220220] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 870.220220] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d00d56-3c12-109f-0484-837c138a7da1" [ 870.220220] env[68569]: _type = "Task" [ 870.220220] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.228155] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d00d56-3c12-109f-0484-837c138a7da1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.430057] env[68569]: DEBUG nova.compute.manager [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Received event network-vif-plugged-289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 870.430678] env[68569]: DEBUG oslo_concurrency.lockutils [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] Acquiring lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.430678] env[68569]: DEBUG oslo_concurrency.lockutils [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.430854] env[68569]: DEBUG oslo_concurrency.lockutils [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.431309] env[68569]: DEBUG nova.compute.manager [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] No waiting events found dispatching network-vif-plugged-289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 870.431309] env[68569]: WARNING nova.compute.manager [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Received unexpected event 
network-vif-plugged-289a9512-486e-4b33-bb13-4b51795aaf2b for instance with vm_state building and task_state spawning. [ 870.431399] env[68569]: DEBUG nova.compute.manager [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Received event network-changed-289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 870.431557] env[68569]: DEBUG nova.compute.manager [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Refreshing instance network info cache due to event network-changed-289a9512-486e-4b33-bb13-4b51795aaf2b. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 870.431771] env[68569]: DEBUG oslo_concurrency.lockutils [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] Acquiring lock "refresh_cache-dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.461349] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4137e6-3d96-4f44-b0de-6db1d4c89d64 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.802s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.463909] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.466499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.164s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.468056] env[68569]: INFO nova.compute.claims [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.496779] env[68569]: INFO nova.scheduler.client.report [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Deleted allocations for instance 9eafa273-097b-48ac-ae5f-4f7a469ac861 [ 870.654019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Releasing lock "refresh_cache-dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.654019] env[68569]: DEBUG 
nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Instance network_info: |[{"id": "289a9512-486e-4b33-bb13-4b51795aaf2b", "address": "fa:16:3e:2b:11:5c", "network": {"id": "5a057e29-5621-4cc2-b42d-dc78c7d0eac1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1694431738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc349e5e6d3b4ad39e917625b5078597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289a9512-48", "ovs_interfaceid": "289a9512-486e-4b33-bb13-4b51795aaf2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.654264] env[68569]: DEBUG oslo_concurrency.lockutils [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] Acquired lock "refresh_cache-dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.654264] env[68569]: DEBUG nova.network.neutron [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Refreshing network info cache for port 289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.654264] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:11:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '289a9512-486e-4b33-bb13-4b51795aaf2b', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.661683] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Creating folder: Project (cc349e5e6d3b4ad39e917625b5078597). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.666321] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2689d15-e3df-40e3-acfb-602a9df826db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.678609] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Created folder: Project (cc349e5e6d3b4ad39e917625b5078597) in parent group-v633430. [ 870.679183] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Creating folder: Instances. Parent ref: group-v633615. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.679567] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b222eb82-4c3a-4a57-b11e-9b96ef041fc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.690053] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Created folder: Instances in parent group-v633615. [ 870.690520] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.693073] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.693073] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-316021ae-cf7e-4d79-98d8-eff6e03ea3bc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.717017] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.717017] env[68569]: value = "task-3167140" [ 870.717017] env[68569]: _type = "Task" [ 870.717017] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.725585] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167140, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.731258] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d00d56-3c12-109f-0484-837c138a7da1, 'name': SearchDatastore_Task, 'duration_secs': 0.015927} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.732407] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227d1588-185e-48ea-840a-528a19d26912 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.739106] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 870.739106] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ee8f6-cf58-a1df-23db-5dcc46e294d1" [ 870.739106] env[68569]: _type = "Task" [ 870.739106] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.750021] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ee8f6-cf58-a1df-23db-5dcc46e294d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.929986] env[68569]: DEBUG nova.network.neutron [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Updated VIF entry in instance network info cache for port 289a9512-486e-4b33-bb13-4b51795aaf2b. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.929986] env[68569]: DEBUG nova.network.neutron [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Updating instance_info_cache with network_info: [{"id": "289a9512-486e-4b33-bb13-4b51795aaf2b", "address": "fa:16:3e:2b:11:5c", "network": {"id": "5a057e29-5621-4cc2-b42d-dc78c7d0eac1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1694431738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc349e5e6d3b4ad39e917625b5078597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289a9512-48", "ovs_interfaceid": "289a9512-486e-4b33-bb13-4b51795aaf2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.964867] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Starting 
instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.005533] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f5342fa-d966-4ec9-ad42-94580ecefd6b tempest-ServerAddressesTestJSON-1987098552 tempest-ServerAddressesTestJSON-1987098552-project-member] Lock "9eafa273-097b-48ac-ae5f-4f7a469ac861" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.857s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.065410] env[68569]: DEBUG nova.network.neutron [-] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.227678] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167140, 'name': CreateVM_Task, 'duration_secs': 0.4812} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.233108] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.233108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.233108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.233108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.233108] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8b4169-4d69-4f88-aa6e-0529888e7c6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.236758] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 871.236758] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527d8c84-8b05-0235-7477-0346a272bc81" [ 871.236758] env[68569]: _type = "Task" [ 871.236758] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.251311] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ee8f6-cf58-a1df-23db-5dcc46e294d1, 'name': SearchDatastore_Task, 'duration_secs': 0.014804} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.256120] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.256120] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/f84204a9-aeea-498e-9682-298e581b34e3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.256120] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527d8c84-8b05-0235-7477-0346a272bc81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.256120] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f82a3cd-d7de-4556-a674-58bc694207f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.262904] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 871.262904] env[68569]: value = "task-3167141" [ 871.262904] env[68569]: _type = "Task" [ 871.262904] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.273029] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167141, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.436046] env[68569]: DEBUG oslo_concurrency.lockutils [req-4391980c-4bec-4d7b-b9f5-6883db922810 req-3136b6b7-2138-45d8-ad13-ed4333316c42 service nova] Releasing lock "refresh_cache-dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.513925] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.567893] env[68569]: INFO nova.compute.manager [-] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Took 1.59 seconds to deallocate network for instance. [ 871.755484] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527d8c84-8b05-0235-7477-0346a272bc81, 'name': SearchDatastore_Task, 'duration_secs': 0.028775} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.755917] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.755917] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.756261] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.756411] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.756605] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.759857] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e0eff46-8da8-4c82-a933-800d9f122fa3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.774718] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167141, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.776736] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.776981] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.780406] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58808383-ece2-4e96-99da-2adf130d0aac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.788660] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 871.788660] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c48c8d-6803-695f-ec1c-05e247faec03" [ 871.788660] env[68569]: _type = "Task" [ 871.788660] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.797658] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c48c8d-6803-695f-ec1c-05e247faec03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.002983] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d692273c-4802-4cd8-9f00-b94c433b562d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.012720] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16356624-13dc-4490-864e-91a3ee645a0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.047327] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f962e77-36a3-427f-bd38-000ba1853c78 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.055045] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378d6259-48fb-4ebf-b583-d94017737db0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.070102] env[68569]: DEBUG nova.compute.provider_tree [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.082062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.277109] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690892} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.277109] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/f84204a9-aeea-498e-9682-298e581b34e3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.277109] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.277109] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-685a57e6-cc9c-4864-aa85-a8132900f71c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.283611] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 872.283611] env[68569]: value = "task-3167142" [ 872.283611] env[68569]: _type = "Task" [ 872.283611] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.295524] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.300650] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c48c8d-6803-695f-ec1c-05e247faec03, 'name': SearchDatastore_Task, 'duration_secs': 0.067649} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.301542] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c928e47-753f-4c93-b886-14aab72dbec4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.308056] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 872.308056] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522e0ca6-192b-a7ca-692a-9ed54881ea74" [ 872.308056] env[68569]: _type = "Task" [ 872.308056] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.318573] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522e0ca6-192b-a7ca-692a-9ed54881ea74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.507582] env[68569]: DEBUG nova.compute.manager [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Received event network-vif-deleted-eb7c5f24-ece0-4c55-86ec-3c90bc431594 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 872.507790] env[68569]: DEBUG nova.compute.manager [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-changed-324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 872.507949] env[68569]: DEBUG nova.compute.manager [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Refreshing instance network info cache due to event network-changed-324818fd-bb45-4575-9e4a-bb6516576dde. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 872.508332] env[68569]: DEBUG oslo_concurrency.lockutils [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] Acquiring lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.508478] env[68569]: DEBUG oslo_concurrency.lockutils [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] Acquired lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.508926] env[68569]: DEBUG nova.network.neutron [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Refreshing network info cache for port 324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.573649] env[68569]: DEBUG nova.scheduler.client.report [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.795883] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 
tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086541} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.796460] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.797767] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6571acdd-4bce-4f23-baaf-488bac1fa878 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.821226] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/f84204a9-aeea-498e-9682-298e581b34e3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.824571] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16ffedbd-145c-410b-9f6a-bcd0c6dbfaff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.846235] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522e0ca6-192b-a7ca-692a-9ed54881ea74, 'name': SearchDatastore_Task, 'duration_secs': 0.01246} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.847750] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.848080] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53/dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.848394] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 872.848394] env[68569]: value = "task-3167143" [ 872.848394] env[68569]: _type = "Task" [ 872.848394] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.848583] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-152587c4-713a-4c43-ade6-2e359a792b10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.861371] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167143, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.863850] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 872.863850] env[68569]: value = "task-3167144" [ 872.863850] env[68569]: _type = "Task" [ 872.863850] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.874977] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.080415] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.081687] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 873.088136] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.829s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.088384] env[68569]: DEBUG nova.objects.instance [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lazy-loading 'resources' on Instance uuid cc5139e1-4601-4966-9224-25b8223b8a57 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.336639] env[68569]: DEBUG nova.network.neutron [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updated VIF entry in instance network info cache for port 324818fd-bb45-4575-9e4a-bb6516576dde. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.336639] env[68569]: DEBUG nova.network.neutron [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.366273] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167143, 'name': ReconfigVM_Task, 'duration_secs': 0.365656} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.370218] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Reconfigured VM instance instance-0000003f to attach disk [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/f84204a9-aeea-498e-9682-298e581b34e3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.374039] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17b9d8a4-11fc-4b4b-a83a-b1f05afc73e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.379857] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167144, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.381459] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 873.381459] env[68569]: value = "task-3167145" [ 873.381459] env[68569]: _type = "Task" [ 873.381459] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.393698] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167145, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.594573] env[68569]: DEBUG nova.compute.utils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.596262] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 873.596451] env[68569]: DEBUG nova.network.neutron [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.666135] env[68569]: DEBUG nova.policy [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54ebbdfe9bfb4854a40b07d60c7a9efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f361997374e943cfa7a8e4e4884d6c65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 873.838177] env[68569]: DEBUG oslo_concurrency.lockutils [req-249a6bc5-5e8c-4124-9593-0dce61ed64be req-42ebf5b0-dff7-44a6-8c68-47c9b196fd88 service nova] Releasing lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.875036] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.935437} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.875036] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53/dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.875036] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.875269] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58c81a4a-5274-4443-938d-a39952e9b42f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.884228] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 873.884228] env[68569]: value = "task-3167146" [ 873.884228] env[68569]: _type = "Task" [ 873.884228] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.900510] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167145, 'name': Rename_Task, 'duration_secs': 0.18858} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.903415] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.903415] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.903592] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f80c52e4-453c-4a77-a171-22c8f1bec790 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.909748] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 873.909748] env[68569]: value = "task-3167147" [ 873.909748] env[68569]: _type = "Task" [ 873.909748] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.921541] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167147, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.100029] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f407fa62-fa6a-4540-98db-4a1d498a6e08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.104280] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 874.114011] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aced3df-950c-4a0f-967d-8d1c80571f8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.147250] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf2f64b-6a1a-4cba-9447-f7a4d0449a2f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.157391] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5511d6-aec4-4fcf-b197-98b2769dc0f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.172689] env[68569]: DEBUG nova.compute.provider_tree [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.212604] env[68569]: DEBUG nova.network.neutron [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Successfully created port: 5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.400513] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099268} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.400513] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.401345] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b03d04-f983-465e-aece-586feafebf0f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.427023] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53/dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.428098] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-830a1258-aa95-4146-a63e-49a434d9c35e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.449364] env[68569]: DEBUG oslo_vmware.api [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167147, 'name': PowerOnVM_Task, 'duration_secs': 0.510333} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.451332] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.452042] env[68569]: INFO nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Took 8.51 seconds to spawn the instance on the hypervisor. [ 874.452042] env[68569]: DEBUG nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 874.452155] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 874.452155] env[68569]: value = "task-3167148" [ 874.452155] env[68569]: _type = "Task" [ 874.452155] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.452823] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1316cc-735d-44bd-bd60-947230a18742 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.468084] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167148, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.675389] env[68569]: DEBUG nova.scheduler.client.report [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.978150] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167148, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.981299] env[68569]: INFO nova.compute.manager [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Took 47.52 seconds to build instance. [ 875.114822] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 875.141439] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 875.141753] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.141830] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 875.142010] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.142165] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 875.142307] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 875.142506] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 875.142654] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 875.142812] env[68569]: DEBUG nova.virt.hardware [None 
req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 875.142965] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 875.143259] env[68569]: DEBUG nova.virt.hardware [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 875.144164] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e653999-654d-42be-ab65-99aecea6153d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.156134] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828208b2-4245-428f-974e-fb189d946395 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.183362] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.185947] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.834s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.186408] env[68569]: DEBUG nova.objects.instance [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lazy-loading 'resources' on Instance uuid c634f7eb-2f71-473d-8f90-71d74edffecb {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.214360] env[68569]: INFO nova.scheduler.client.report [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Deleted allocations for instance cc5139e1-4601-4966-9224-25b8223b8a57 [ 875.467740] env[68569]: INFO nova.compute.manager [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Rescuing [ 875.468053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock 
"refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.471734] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.472009] env[68569]: DEBUG nova.network.neutron [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.473814] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167148, 'name': ReconfigVM_Task, 'duration_secs': 1.012947} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.474224] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Reconfigured VM instance instance-00000040 to attach disk [datastore1] dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53/dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.474860] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df3748c7-1826-435d-9c76-aa660efe15e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.481769] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 875.481769] env[68569]: value = "task-3167149" [ 875.481769] env[68569]: _type = "Task" [ 875.481769] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.487274] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0ee1996f-ac94-4526-9495-903401d7c5ce tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "f84204a9-aeea-498e-9682-298e581b34e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.992s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.497308] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167149, 'name': Rename_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.727846] env[68569]: DEBUG oslo_concurrency.lockutils [None req-561bb98b-f5b5-4693-a483-ac84d75fb5c6 tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "cc5139e1-4601-4966-9224-25b8223b8a57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.919s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.993188] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.995766] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167149, 'name': Rename_Task, 'duration_secs': 0.204911} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.995955] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.996393] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38c12402-e1b6-4301-a657-61a80df19d53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.007908] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 876.007908] env[68569]: value = "task-3167151" [ 876.007908] env[68569]: _type = "Task" [ 876.007908] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.022746] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.126313] env[68569]: DEBUG nova.compute.manager [req-0a7ba46e-c85c-4e74-bb78-40ab5cc422c3 req-61508aac-07c8-4773-9f96-95bcf8f35dbb service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Received event network-vif-plugged-5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 876.126407] env[68569]: DEBUG oslo_concurrency.lockutils [req-0a7ba46e-c85c-4e74-bb78-40ab5cc422c3 req-61508aac-07c8-4773-9f96-95bcf8f35dbb service nova] Acquiring lock "61aa0997-ffa6-4551-bdaa-132026e240f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.127144] env[68569]: DEBUG oslo_concurrency.lockutils [req-0a7ba46e-c85c-4e74-bb78-40ab5cc422c3 req-61508aac-07c8-4773-9f96-95bcf8f35dbb service nova] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.127144] env[68569]: DEBUG oslo_concurrency.lockutils [req-0a7ba46e-c85c-4e74-bb78-40ab5cc422c3 req-61508aac-07c8-4773-9f96-95bcf8f35dbb service nova] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.127144] env[68569]: DEBUG nova.compute.manager [req-0a7ba46e-c85c-4e74-bb78-40ab5cc422c3 req-61508aac-07c8-4773-9f96-95bcf8f35dbb service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] No waiting events found dispatching network-vif-plugged-5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 876.127144] env[68569]: WARNING nova.compute.manager [req-0a7ba46e-c85c-4e74-bb78-40ab5cc422c3 req-61508aac-07c8-4773-9f96-95bcf8f35dbb service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Received unexpected event network-vif-plugged-5d59acab-5f9d-44bc-ac8c-231dda0ac182 for instance with vm_state building and task_state spawning. 
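The "Acquiring lock ... by ..." / "acquired ... waited 0.000s" / "released ... held 0.000s" records above come from oslo.concurrency's lockutils wrappers (the `inner` frames in lockutils.py). As a rough illustrative sketch only, not Nova's actual code, the same acquire/wait/hold bookkeeping appears around any callable wrapped with lockutils.synchronized; the lock name below is copied from the records above, everything else is assumed.

    from oslo_concurrency import lockutils

    # Hypothetical example: a function guarded by the same kind of named,
    # in-process lock that produces the "Acquiring lock ..." / "acquired ...
    # waited Ns" / "released ... held Ns" DEBUG lines seen above.
    @lockutils.synchronized('61aa0997-ffa6-4551-bdaa-132026e240f9-events')
    def _pop_event():
        # Runs only while the named lock is held; lockutils logs the
        # acquire/wait/hold timings around this call at DEBUG level.
        return None

    _pop_event()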
[ 876.179284] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96cd19a-7a68-4b49-9c55-a61b61824ee4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.190949] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c35b217-3e29-439a-ac93-45f2800a8fd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.226696] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3000b1-5230-437e-8773-eb81264a68b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.236958] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c1b81b-e7de-44cc-9c9a-c1ccb3abe12a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.253252] env[68569]: DEBUG nova.compute.provider_tree [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.304117] env[68569]: DEBUG nova.network.neutron [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Successfully updated port: 5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.309510] env[68569]: DEBUG nova.network.neutron [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.523019] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 
tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167151, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.524156] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.763388] env[68569]: DEBUG nova.scheduler.client.report [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.807670] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.807847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.808012] env[68569]: DEBUG nova.network.neutron [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.811999] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.020810] env[68569]: DEBUG oslo_vmware.api [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167151, 'name': PowerOnVM_Task, 'duration_secs': 0.556385} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.021350] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.021689] env[68569]: INFO nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Took 8.55 seconds to spawn the instance on the hypervisor. [ 877.022057] env[68569]: DEBUG nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.023679] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b11401-a50b-4ae1-868b-3a6f825609f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.274264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.088s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.277921] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.673s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.277991] env[68569]: DEBUG nova.objects.instance [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lazy-loading 'resources' on Instance uuid ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.297735] env[68569]: INFO nova.scheduler.client.report [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleted allocations for instance c634f7eb-2f71-473d-8f90-71d74edffecb [ 877.349325] env[68569]: DEBUG nova.network.neutron [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.547165] env[68569]: INFO nova.compute.manager [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Took 48.95 seconds to build instance. [ 877.609085] env[68569]: DEBUG nova.network.neutron [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updating instance_info_cache with network_info: [{"id": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "address": "fa:16:3e:b2:78:e5", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d59acab-5f", "ovs_interfaceid": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.808704] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6cd21504-83ff-4c0c-96e8-6860181a094c tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "c634f7eb-2f71-473d-8f90-71d74edffecb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.821s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.050687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bc38153b-f65c-4ba7-a733-e86207ef4105 tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.233s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.114498] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.114874] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Instance network_info: 
|[{"id": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "address": "fa:16:3e:b2:78:e5", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d59acab-5f", "ovs_interfaceid": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 878.115478] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:78:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d59acab-5f9d-44bc-ac8c-231dda0ac182', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.127390] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating folder: Project (f361997374e943cfa7a8e4e4884d6c65). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.127390] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f1e4b98-00d7-4cd7-addb-3de9c4886e6a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.138853] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created folder: Project (f361997374e943cfa7a8e4e4884d6c65) in parent group-v633430. [ 878.139482] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating folder: Instances. Parent ref: group-v633618. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.139783] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b34f6ea7-ac51-4489-ade6-919e6a234dd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.151529] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created folder: Instances in parent group-v633618. [ 878.151783] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.152027] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.152258] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47428f62-8f23-483b-a712-0acc2956c027 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.179314] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.179314] env[68569]: value = "task-3167154" [ 878.179314] env[68569]: _type = "Task" [ 878.179314] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.188563] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167154, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.209521] env[68569]: DEBUG nova.compute.manager [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Received event network-changed-5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 878.209719] env[68569]: DEBUG nova.compute.manager [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Refreshing instance network info cache due to event network-changed-5d59acab-5f9d-44bc-ac8c-231dda0ac182. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 878.209958] env[68569]: DEBUG oslo_concurrency.lockutils [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] Acquiring lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.210123] env[68569]: DEBUG oslo_concurrency.lockutils [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] Acquired lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.210284] env[68569]: DEBUG nova.network.neutron [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Refreshing network info cache for port 5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.255143] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.255143] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.255143] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c450161-27b9-4568-976a-8debf431bd1b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.263734] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5005ae43-b9e6-435b-a825-f21f56ade035 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.299254] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbc98e4-5f52-4b22-8379-e704e7f12c06 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.308130] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c92bbf3-fbf8-491a-bfb7-431b91201d66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.323506] env[68569]: DEBUG nova.compute.provider_tree [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.369234] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.369554] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aee7bf0f-7d70-45d2-912b-f70a4c5dedf7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.377049] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 878.377049] env[68569]: value = "task-3167155" [ 878.377049] env[68569]: _type = "Task" [ 878.377049] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.387767] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.481724] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.481724] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.481724] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.481724] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.481948] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.483439] env[68569]: INFO nova.compute.manager [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Terminating instance [ 878.554286] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.695437] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167154, 'name': CreateVM_Task, 'duration_secs': 0.359344} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.695437] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.696224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.696456] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.697181] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 878.697302] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e70409d6-d77d-4a81-8a7d-6d3721169e36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.703490] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 878.703490] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527bd2d7-67ca-99ec-5009-4880d032c201" [ 878.703490] 
env[68569]: _type = "Task" [ 878.703490] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.716420] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527bd2d7-67ca-99ec-5009-4880d032c201, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.855224] env[68569]: ERROR nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] [req-f3d65b19-87ba-4dfc-bb97-6783c4aac7c4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f3d65b19-87ba-4dfc-bb97-6783c4aac7c4"}]} [ 878.887328] env[68569]: DEBUG nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 878.902587] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167155, 'name': PowerOffVM_Task, 'duration_secs': 0.288231} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.903081] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.904308] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f013a141-96f4-4ba3-9e92-56d1acfd8f63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.936504] env[68569]: DEBUG nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 878.936724] env[68569]: DEBUG nova.compute.provider_tree [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.939375] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132a83e1-a8da-409b-973d-bdff73741eae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.956803] env[68569]: DEBUG nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 878.980595] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.980901] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca09894b-4afc-4ff3-8512-855271cdc80e {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.986034] env[68569]: DEBUG nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 878.988650] env[68569]: DEBUG nova.compute.manager [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.988853] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.992227] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ceb4b16-c851-4e55-83ee-4a940183182e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.995359] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 878.995359] env[68569]: value = "task-3167156" [ 878.995359] env[68569]: _type = "Task" [ 878.995359] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.006082] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.006082] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3470ea1-f856-43b3-a0bb-5c9c1527b5f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.015141] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 879.015364] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.015627] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.015731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.015899] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.016999] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6082c165-7b6b-4f2c-b0b2-a5bc9947a2e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.020477] env[68569]: DEBUG oslo_vmware.api [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 879.020477] env[68569]: value = "task-3167157" [ 879.020477] env[68569]: _type = "Task" [ 879.020477] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.041031] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.041031] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.046560] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99aebe7e-3494-4e6f-b700-1191f3ad44cc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.048966] env[68569]: DEBUG oslo_vmware.api [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.052835] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 879.052835] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5233051b-0ab8-8446-373b-d190bb465659" [ 879.052835] env[68569]: _type = "Task" [ 879.052835] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.057990] env[68569]: DEBUG nova.network.neutron [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updated VIF entry in instance network info cache for port 5d59acab-5f9d-44bc-ac8c-231dda0ac182. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.058102] env[68569]: DEBUG nova.network.neutron [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updating instance_info_cache with network_info: [{"id": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "address": "fa:16:3e:b2:78:e5", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d59acab-5f", "ovs_interfaceid": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.065178] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5233051b-0ab8-8446-373b-d190bb465659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.083908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.213148] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527bd2d7-67ca-99ec-5009-4880d032c201, 'name': SearchDatastore_Task, 'duration_secs': 0.025082} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.213488] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.213714] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.214019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.409431] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77367318-b2ae-4c08-8ad1-dcdd0b47334a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.419342] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904e9b4f-8c99-4e2a-806d-2576e2a80b59 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.449349] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd48f9be-b177-46dd-a904-59bd2070b17c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.456260] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad856b1f-b28b-4720-9821-977f37d46dbb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.469650] env[68569]: DEBUG nova.compute.provider_tree [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.530995] env[68569]: DEBUG oslo_vmware.api [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167157, 'name': PowerOffVM_Task, 'duration_secs': 0.276634} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.531302] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.531466] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.531718] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ba1ac3b-07c4-4bb2-9b53-b62a142c3a19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.546436] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.546677] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.546874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.547101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.547304] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.551763] env[68569]: INFO nova.compute.manager [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 
tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Terminating instance [ 879.562949] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5233051b-0ab8-8446-373b-d190bb465659, 'name': SearchDatastore_Task, 'duration_secs': 0.016107} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.564244] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e79a306-dd24-4396-9ba1-42eeb8423286 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.569615] env[68569]: DEBUG oslo_concurrency.lockutils [req-76889d39-3af0-42ca-9e74-c3928b20d23e req-17044dd0-fb06-48e9-8651-37116fbe578a service nova] Releasing lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.570320] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 879.570320] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5209de99-992d-617d-79f1-095a7e6384ae" [ 879.570320] env[68569]: _type = "Task" [ 879.570320] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.578213] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5209de99-992d-617d-79f1-095a7e6384ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.589801] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.590011] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.590207] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Deleting the datastore file [datastore1] dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.590462] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51cde6ef-c8a1-4b40-9d34-010a20a5976c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.597175] env[68569]: DEBUG oslo_vmware.api [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for the task: (returnval){ [ 879.597175] env[68569]: value = "task-3167159" [ 879.597175] env[68569]: _type = "Task" [ 879.597175] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.605707] env[68569]: DEBUG oslo_vmware.api [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167159, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.007101] env[68569]: DEBUG nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 880.007385] env[68569]: DEBUG nova.compute.provider_tree [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 105 to 106 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 880.007561] env[68569]: DEBUG nova.compute.provider_tree [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 880.059148] env[68569]: DEBUG nova.compute.manager [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 880.059341] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.060523] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff45f69a-de6a-4ee5-b331-01b41774a06e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.071024] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.071024] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31ae92c4-0a68-4b1b-9e12-dffde39c93fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.080093] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5209de99-992d-617d-79f1-095a7e6384ae, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.081366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.081662] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. {{(pid=68569) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 880.082043] env[68569]: DEBUG oslo_vmware.api [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 880.082043] env[68569]: value = "task-3167160" [ 880.082043] env[68569]: _type = "Task" [ 880.082043] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.082323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.082495] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.082701] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60532af6-26e1-4c6d-a3f4-a4f7b1363599 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.084670] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d001ed51-3500-4739-87f0-37b5f8937a01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.095850] env[68569]: DEBUG oslo_vmware.api [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167160, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.097937] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.098129] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.098988] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 880.098988] env[68569]: value = "task-3167161" [ 880.098988] env[68569]: _type = "Task" [ 880.098988] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.099203] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-930d7a57-305e-4175-af21-fb40b3642ae0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.112450] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167161, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.116061] env[68569]: DEBUG oslo_vmware.api [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Task: {'id': task-3167159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160779} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.116363] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 880.116363] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5243ed26-eea3-bfc9-92df-9da0acd07303" [ 880.116363] env[68569]: _type = "Task" [ 880.116363] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.116602] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.116816] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.117017] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.117268] env[68569]: INFO nova.compute.manager [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Took 1.13 seconds to destroy the instance on the hypervisor. [ 880.117497] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.117720] env[68569]: DEBUG nova.compute.manager [-] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.117987] env[68569]: DEBUG nova.network.neutron [-] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.128529] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5243ed26-eea3-bfc9-92df-9da0acd07303, 'name': SearchDatastore_Task, 'duration_secs': 0.008392} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.129331] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7122fcf-bbc6-4526-974f-7a2206394b0a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.134559] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 880.134559] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c89fc3-6340-5f57-97e0-8152cda21267" [ 880.134559] env[68569]: _type = "Task" [ 880.134559] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.143327] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c89fc3-6340-5f57-97e0-8152cda21267, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.514717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.237s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.519409] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.685s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.523132] env[68569]: INFO nova.compute.claims [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.545208] env[68569]: INFO nova.scheduler.client.report [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Deleted allocations for instance ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae [ 880.595404] env[68569]: DEBUG oslo_vmware.api [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167160, 'name': PowerOffVM_Task, 'duration_secs': 0.176628} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.597492] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.597492] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 880.597492] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd647527-b081-415f-8cef-aabc168953ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.612395] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167161, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.645293] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c89fc3-6340-5f57-97e0-8152cda21267, 'name': SearchDatastore_Task, 'duration_secs': 0.009375} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.645832] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.646043] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 61aa0997-ffa6-4551-bdaa-132026e240f9/61aa0997-ffa6-4551-bdaa-132026e240f9.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.646360] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c4f681d-905e-404c-a829-3e250ce8b494 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.652590] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 880.652590] env[68569]: value = "task-3167163" [ 880.652590] env[68569]: _type = "Task" [ 880.652590] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.659021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.659021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.659021] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Deleting the datastore file [datastore2] fb97d2dd-d42a-42e8-9a36-5c913a58b891 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.659021] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f484dd02-8a26-4a9b-8c78-a1c28103ce26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.665446] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167163, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.671442] env[68569]: DEBUG oslo_vmware.api [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for the task: (returnval){ [ 880.671442] env[68569]: value = "task-3167164" [ 880.671442] env[68569]: _type = "Task" [ 880.671442] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.682503] env[68569]: DEBUG oslo_vmware.api [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.684852] env[68569]: DEBUG nova.compute.manager [req-834b75bb-cddc-41ca-8619-4ade1f384cca req-f4c15354-2896-4141-afeb-1b391b0fa19e service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Received event network-vif-deleted-289a9512-486e-4b33-bb13-4b51795aaf2b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 880.686985] env[68569]: INFO nova.compute.manager [req-834b75bb-cddc-41ca-8619-4ade1f384cca req-f4c15354-2896-4141-afeb-1b391b0fa19e service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Neutron deleted interface 289a9512-486e-4b33-bb13-4b51795aaf2b; detaching it from the instance and deleting it from the info cache [ 880.686985] env[68569]: DEBUG nova.network.neutron [req-834b75bb-cddc-41ca-8619-4ade1f384cca req-f4c15354-2896-4141-afeb-1b391b0fa19e service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.925306] env[68569]: DEBUG nova.network.neutron [-] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.053455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4356c9af-7cba-43d8-8283-22d56d063a3f tempest-ListServersNegativeTestJSON-1252951075 tempest-ListServersNegativeTestJSON-1252951075-project-member] Lock "ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.992s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.116400] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525502} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.116687] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. 
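The PowerOffVM_Task, DeleteDatastoreFile_Task and CopyVirtualDisk_Task entries above all follow the same shape: the driver invokes an asynchronous vCenter task method through the oslo.vmware session, then blocks in wait_for_task, which emits the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming plain oslo.vmware usage; the endpoint, credentials and vm_ref below are placeholders rather than values from this deployment, and this is not Nova's actual vm_util code:

    from oslo_vmware import api

    # Placeholder vCenter endpoint/credentials; a real deployment takes these
    # from configuration, as in the session setup near the top of this log.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # Kick off the asynchronous vCenter task and get back a Task reference.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # Poll it (the "progress is N%" lines) until it succeeds, raising on error.
        return session.wait_for_task(task)

The SearchDatastore_Task and ReconfigVM_Task calls in the surrounding entries are driven the same way; only the invoked method and its arguments change.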
[ 881.117522] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f070db7-1088-406e-80c9-9c9ad8317558 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.142432] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.143100] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-104e562a-dca8-4df8-9424-ec556977bf79 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.164736] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167163, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476384} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.165980] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 61aa0997-ffa6-4551-bdaa-132026e240f9/61aa0997-ffa6-4551-bdaa-132026e240f9.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.166240] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.166555] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 881.166555] env[68569]: value = "task-3167165" [ 881.166555] env[68569]: _type = "Task" [ 881.166555] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.166742] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-145533f3-7b90-45d2-8020-7a0d650e6f10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.176466] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 881.176466] env[68569]: value = "task-3167166" [ 881.176466] env[68569]: _type = "Task" [ 881.176466] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.185113] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.185578] env[68569]: DEBUG oslo_vmware.api [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Task: {'id': task-3167164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154854} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.185945] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 881.186151] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 881.186488] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 881.186558] env[68569]: INFO nova.compute.manager [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Took 1.13 seconds to destroy the instance on the hypervisor. [ 881.186800] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.186999] env[68569]: DEBUG nova.compute.manager [-] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 881.187745] env[68569]: DEBUG nova.network.neutron [-] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 881.191551] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167166, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.192080] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a32c296-00fa-4d4d-b4bd-3f3765880273 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.202738] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5836f44c-4d15-435a-afae-7841ef0697c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.238542] env[68569]: DEBUG nova.compute.manager [req-834b75bb-cddc-41ca-8619-4ade1f384cca req-f4c15354-2896-4141-afeb-1b391b0fa19e service nova] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Detach interface failed, port_id=289a9512-486e-4b33-bb13-4b51795aaf2b, reason: Instance dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 881.427711] env[68569]: INFO nova.compute.manager [-] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Took 1.31 seconds to deallocate network for instance. [ 881.680404] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167165, 'name': ReconfigVM_Task, 'duration_secs': 0.285556} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.687685] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Reconfigured VM instance instance-0000003f to attach disk [datastore1] f84204a9-aeea-498e-9682-298e581b34e3/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.688736] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecc25dc-a082-4103-8e4b-1778971fdb15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.698967] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068851} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.719787] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.728529] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908d324d-514f-4055-82fe-f495f7ccbbfe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.732358] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f54c0840-420e-41ab-a4ad-2c52f61e1d96 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.766488] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 61aa0997-ffa6-4551-bdaa-132026e240f9/61aa0997-ffa6-4551-bdaa-132026e240f9.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.770572] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a28159f6-de87-40bd-8ae2-3c9b59869819 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.785316] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 881.785316] env[68569]: value = "task-3167167" [ 881.785316] env[68569]: _type = "Task" [ 881.785316] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.790765] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 881.790765] env[68569]: value = "task-3167168" [ 881.790765] env[68569]: _type = "Task" [ 881.790765] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.800455] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167167, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.805521] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167168, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.933770] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.987836] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9d78d9-46f3-407a-be7d-d6ba5cade8bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.995799] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f025ba62-b0cc-44a0-85bd-5a26a0b5a8f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.030027] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ca7b64-fc0d-4c25-97cc-1e086c1a7c36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.036445] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dd7c8c-e649-45fb-b3a0-7f942168c5c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.052864] env[68569]: DEBUG nova.compute.provider_tree [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.298632] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167167, 'name': ReconfigVM_Task, 'duration_secs': 0.158823} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.299342] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.299604] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1178237a-ed7b-4e5f-bbfa-51f7927c8670 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.305491] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167168, 'name': ReconfigVM_Task, 'duration_secs': 0.328324} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.307178] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 61aa0997-ffa6-4551-bdaa-132026e240f9/61aa0997-ffa6-4551-bdaa-132026e240f9.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.307595] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 882.307595] env[68569]: value = "task-3167169" [ 882.307595] env[68569]: _type = "Task" [ 882.307595] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.307793] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aef5e6bb-4064-4afe-a0d3-467777f1cee6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.318841] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 882.318841] env[68569]: value = "task-3167170" [ 882.318841] env[68569]: _type = "Task" [ 882.318841] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.322445] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.334377] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167170, 'name': Rename_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.421549] env[68569]: DEBUG nova.network.neutron [-] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.555811] env[68569]: DEBUG nova.scheduler.client.report [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.710459] env[68569]: DEBUG nova.compute.manager [req-12b39dea-f580-4afd-8ef1-d857bb3049db req-7de77e15-d4d7-46da-a603-5c3a61a4ab4b service nova] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Received event network-vif-deleted-7d78a65a-5ed7-419c-b054-a78d424ca795 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 882.819861] env[68569]: DEBUG oslo_vmware.api [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167169, 'name': PowerOnVM_Task, 'duration_secs': 0.41182} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.820232] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.823262] env[68569]: DEBUG nova.compute.manager [None req-c89369c2-0730-4b55-8b32-14d52a8403c8 tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.824226] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d414e69c-5353-4d19-9a88-e25d031b3d5f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.843408] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167170, 'name': Rename_Task, 'duration_secs': 0.156106} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.844356] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.844356] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43a1939a-d501-4292-bddb-00310aeeb363 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.853187] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 882.853187] env[68569]: value = "task-3167171" [ 882.853187] env[68569]: _type = "Task" [ 882.853187] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.862565] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167171, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.925036] env[68569]: INFO nova.compute.manager [-] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Took 1.74 seconds to deallocate network for instance. [ 883.061146] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.061566] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 883.065126] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.480s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.065416] env[68569]: DEBUG nova.objects.instance [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lazy-loading 'resources' on Instance uuid 57a63648-83e9-4f23-aebc-050e58149ce2 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.363370] env[68569]: DEBUG oslo_vmware.api [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167171, 'name': PowerOnVM_Task, 'duration_secs': 0.475151} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.363644] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.363841] env[68569]: INFO nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Took 8.25 seconds to spawn the instance on the hypervisor. 
[ 883.364058] env[68569]: DEBUG nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.364971] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1907398b-b9f3-4ef7-827a-3457526f9081 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.431565] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.567383] env[68569]: DEBUG nova.compute.utils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 883.571925] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.572126] env[68569]: DEBUG nova.network.neutron [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.639813] env[68569]: DEBUG nova.policy [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b5c37a78c2d429fb2c6566186dfcb94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec7a9d5c742c4b91891bf31270a92e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 883.770188] env[68569]: DEBUG nova.compute.manager [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 883.770188] env[68569]: DEBUG nova.compute.manager [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing instance network info cache due to event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 883.770188] env[68569]: DEBUG oslo_concurrency.lockutils [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] Acquiring lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.770188] env[68569]: DEBUG oslo_concurrency.lockutils [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.770188] env[68569]: DEBUG nova.network.neutron [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.887760] env[68569]: INFO nova.compute.manager [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Took 51.60 seconds to build instance. [ 883.984500] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96929c00-3775-41a7-8a88-abb532b4691f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.993387] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d12eb4-1d58-4b4e-a5c5-5b1debf185c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.028940] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92884d0d-6f40-46a4-bb24-99927ec1957d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.037066] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d59a56-15e2-4536-a043-611dfeeca264 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.052177] env[68569]: DEBUG nova.compute.provider_tree [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.075601] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 884.263465] env[68569]: DEBUG nova.network.neutron [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Successfully created port: f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.389738] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dbc8a5f-86d7-4cc7-a48d-a1729fd04a89 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.750s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.555316] env[68569]: DEBUG nova.scheduler.client.report [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 884.580615] env[68569]: INFO nova.virt.block_device [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Booting with volume e0560587-266e-42c8-ae9a-a3f92ec6742b at /dev/sda [ 884.622819] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-886eebb2-34c6-4635-8219-c1041024834d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.632740] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046e21cb-3450-45e0-9a0c-3b6d28ab2d74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.669107] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a8094e7-c37b-49c6-8544-b01cabc95fa0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.677491] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9f1dff-f33e-4e07-b381-089d64046f54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.712561] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2c0efb-7fe0-4350-a279-9bbed0552fe2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.720594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6953ea04-7a11-40a8-8e70-3e52d7e5e5db {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.733852] env[68569]: DEBUG nova.virt.block_device [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updating existing volume attachment record: 6df0ab8b-b0d6-4ad1-a07f-5fd8cddacce0 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 884.738906] env[68569]: DEBUG nova.network.neutron [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updated VIF entry in instance network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.739249] env[68569]: DEBUG nova.network.neutron [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.895187] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 885.060565] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.062960] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.407s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.063326] env[68569]: DEBUG nova.objects.instance [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lazy-loading 'resources' on Instance uuid 60aa85f3-edac-40e0-ad31-a8f93219e380 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.086491] env[68569]: INFO nova.scheduler.client.report [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted allocations for instance 57a63648-83e9-4f23-aebc-050e58149ce2 [ 885.242186] env[68569]: DEBUG oslo_concurrency.lockutils [req-578023a4-4c81-41f2-a7c4-4918e75fab63 req-d990fe9a-b995-4998-a575-a2341cffbf8f service nova] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.423789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.594782] env[68569]: DEBUG oslo_concurrency.lockutils [None req-251727e9-c398-43a8-857f-ea4956e55a7c tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "57a63648-83e9-4f23-aebc-050e58149ce2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.599s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.992254] env[68569]: DEBUG nova.compute.manager [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 885.992555] env[68569]: DEBUG nova.compute.manager [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing instance network info cache due to event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 885.992747] env[68569]: DEBUG oslo_concurrency.lockutils [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] Acquiring lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.992898] env[68569]: DEBUG oslo_concurrency.lockutils [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.994121] env[68569]: DEBUG nova.network.neutron [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.046787] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3336ef9-d1f4-4119-9afe-e358c5249d5e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.059070] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa35627-4ccc-4d39-a891-6401dcf338de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.095935] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020492ef-7f82-4ca8-adc6-4b2236aa5d2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.101332] env[68569]: DEBUG nova.compute.manager [req-9d3373ec-b8de-4476-8075-c0aad04533fb req-062409dd-7994-4e63-bcc7-81f3612183dc service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Received event network-vif-plugged-f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 886.101599] env[68569]: DEBUG oslo_concurrency.lockutils [req-9d3373ec-b8de-4476-8075-c0aad04533fb req-062409dd-7994-4e63-bcc7-81f3612183dc service nova] Acquiring lock "a591b671-ca84-47b5-9831-63478d55fd07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.101735] env[68569]: DEBUG oslo_concurrency.lockutils [req-9d3373ec-b8de-4476-8075-c0aad04533fb req-062409dd-7994-4e63-bcc7-81f3612183dc service nova] Lock "a591b671-ca84-47b5-9831-63478d55fd07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.101891] env[68569]: DEBUG oslo_concurrency.lockutils [req-9d3373ec-b8de-4476-8075-c0aad04533fb req-062409dd-7994-4e63-bcc7-81f3612183dc service nova] Lock "a591b671-ca84-47b5-9831-63478d55fd07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.102054] env[68569]: DEBUG nova.compute.manager 
[req-9d3373ec-b8de-4476-8075-c0aad04533fb req-062409dd-7994-4e63-bcc7-81f3612183dc service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] No waiting events found dispatching network-vif-plugged-f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 886.102283] env[68569]: WARNING nova.compute.manager [req-9d3373ec-b8de-4476-8075-c0aad04533fb req-062409dd-7994-4e63-bcc7-81f3612183dc service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Received unexpected event network-vif-plugged-f270ffee-b36c-4ea6-bfca-2f839c671e61 for instance with vm_state building and task_state block_device_mapping. [ 886.108185] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259cf4eb-3674-445c-bd05-59eff3468c5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.124443] env[68569]: DEBUG nova.compute.provider_tree [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.196915] env[68569]: DEBUG nova.network.neutron [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Successfully updated port: f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.627898] env[68569]: DEBUG nova.scheduler.client.report [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.701865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.702019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquired lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.702182] env[68569]: DEBUG nova.network.neutron [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Building network info cache for instance {{(pid=68569) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.834296] env[68569]: DEBUG nova.network.neutron [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updated VIF entry in instance network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.835662] env[68569]: DEBUG nova.network.neutron [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.841563] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 886.842086] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 886.842295] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.842441] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 886.842615] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.842751] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 886.842888] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 886.843096] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 886.843245] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 886.843784] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] 
Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 886.843784] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 886.843784] env[68569]: DEBUG nova.virt.hardware [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 886.844932] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f629287f-02d0-46ac-9282-3c3c3281d5ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.853630] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a46ce3-b371-4243-808f-60c62d1dc42e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.133501] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.136262] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.119s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.137820] env[68569]: INFO nova.compute.claims [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.162777] env[68569]: INFO nova.scheduler.client.report [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Deleted allocations for instance 60aa85f3-edac-40e0-ad31-a8f93219e380 [ 887.262243] env[68569]: DEBUG nova.network.neutron [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.337534] env[68569]: DEBUG oslo_concurrency.lockutils [req-af1fc100-8d5b-4e9f-b518-5abf184dbeca req-18af7b0f-84a2-49b0-88e2-c86460fde938 service nova] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.494248] env[68569]: DEBUG nova.network.neutron [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updating instance_info_cache with network_info: [{"id": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "address": "fa:16:3e:44:18:35", "network": {"id": "b2cbc274-881a-457c-8cf9-4bcf861f755a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-438306757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec7a9d5c742c4b91891bf31270a92e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf270ffee-b3", "ovs_interfaceid": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.670710] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6430f30f-73ac-47e3-a0da-216e7a3dd5b7 tempest-MultipleCreateTestJSON-1756840802 tempest-MultipleCreateTestJSON-1756840802-project-member] Lock "60aa85f3-edac-40e0-ad31-a8f93219e380" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.550s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.808189] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "2cf8803a-8078-4832-a736-330d6bcde6ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.808705] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.998032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e 
tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Releasing lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.998032] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance network_info: |[{"id": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "address": "fa:16:3e:44:18:35", "network": {"id": "b2cbc274-881a-457c-8cf9-4bcf861f755a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-438306757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec7a9d5c742c4b91891bf31270a92e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf270ffee-b3", "ovs_interfaceid": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 887.998455] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:18:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f270ffee-b36c-4ea6-bfca-2f839c671e61', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.007379] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Creating folder: Project (ec7a9d5c742c4b91891bf31270a92e8e). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.007679] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d841b006-6cf8-4108-aadb-3a4b99f7e0dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.025805] env[68569]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 888.025988] env[68569]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68569) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 888.026448] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Folder already exists: Project (ec7a9d5c742c4b91891bf31270a92e8e). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 888.026560] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Creating folder: Instances. Parent ref: group-v633582. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.026790] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4e18cd9-a11a-46e5-9f96-aef519f9fe38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.036149] env[68569]: DEBUG nova.compute.manager [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Received event network-changed-5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 888.036353] env[68569]: DEBUG nova.compute.manager [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Refreshing instance network info cache due to event network-changed-5d59acab-5f9d-44bc-ac8c-231dda0ac182. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 888.036565] env[68569]: DEBUG oslo_concurrency.lockutils [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] Acquiring lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.036771] env[68569]: DEBUG oslo_concurrency.lockutils [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] Acquired lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.036938] env[68569]: DEBUG nova.network.neutron [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Refreshing network info cache for port 5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.039577] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Created folder: Instances in parent group-v633582. [ 888.039828] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 888.040184] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.040387] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-354115b9-7797-454a-b956-f117a6255e2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.060037] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.060037] env[68569]: value = "task-3167174" [ 888.060037] env[68569]: _type = "Task" [ 888.060037] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.067870] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167174, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.169057] env[68569]: DEBUG nova.compute.manager [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Received event network-changed-f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 888.169357] env[68569]: DEBUG nova.compute.manager [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Refreshing instance network info cache due to event network-changed-f270ffee-b36c-4ea6-bfca-2f839c671e61. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 888.170603] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] Acquiring lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.170603] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] Acquired lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.170603] env[68569]: DEBUG nova.network.neutron [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Refreshing network info cache for port f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.575640] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167174, 'name': CreateVM_Task, 'duration_secs': 0.305494} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.575822] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.577663] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633587', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'name': 'volume-e0560587-266e-42c8-ae9a-a3f92ec6742b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a591b671-ca84-47b5-9831-63478d55fd07', 'attached_at': '', 'detached_at': '', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'serial': 'e0560587-266e-42c8-ae9a-a3f92ec6742b'}, 'delete_on_termination': True, 'disk_bus': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'guest_format': None, 'attachment_id': '6df0ab8b-b0d6-4ad1-a07f-5fd8cddacce0', 'volume_type': None}], 'swap': None} {{(pid=68569) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 888.578024] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Root volume attach. Driver type: vmdk {{(pid=68569) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 888.579084] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35189f1c-a189-49b8-98a6-c8673969baa4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.593055] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0fe2c6-a64e-40e9-9b12-d91a7a06f962 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.605313] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d6f18c-2cc0-48fe-92d5-9dbfb54cec7d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.609925] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6d3faf32-ae2c-4c7d-9a79-fb0d5ca128d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.617175] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 888.617175] env[68569]: value = "task-3167175" [ 888.617175] env[68569]: _type = "Task" [ 888.617175] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.627643] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.637767] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0959ccc8-cdf1-4ef6-b460-56ae0652314e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.645880] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9f75c5-cdb9-405b-b539-fddd899ed4a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.680786] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27efd052-b935-4a1b-a0f8-eb6b100480f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.691776] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e1ec08-fe75-4128-84dc-c3b2870dce66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.708582] env[68569]: DEBUG nova.compute.provider_tree [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.909881] env[68569]: DEBUG nova.network.neutron [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updated VIF entry in instance network info cache for port 5d59acab-5f9d-44bc-ac8c-231dda0ac182. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.910292] env[68569]: DEBUG nova.network.neutron [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updating instance_info_cache with network_info: [{"id": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "address": "fa:16:3e:b2:78:e5", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d59acab-5f", "ovs_interfaceid": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.010916] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "f84204a9-aeea-498e-9682-298e581b34e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.011214] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "f84204a9-aeea-498e-9682-298e581b34e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.011425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "f84204a9-aeea-498e-9682-298e581b34e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.011614] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "f84204a9-aeea-498e-9682-298e581b34e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.011864] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "f84204a9-aeea-498e-9682-298e581b34e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.014138] env[68569]: INFO nova.compute.manager [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Terminating instance [ 889.095785] env[68569]: DEBUG nova.network.neutron [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updated VIF entry in instance network info cache for port f270ffee-b36c-4ea6-bfca-2f839c671e61. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.096164] env[68569]: DEBUG nova.network.neutron [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updating instance_info_cache with network_info: [{"id": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "address": "fa:16:3e:44:18:35", "network": {"id": "b2cbc274-881a-457c-8cf9-4bcf861f755a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-438306757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec7a9d5c742c4b91891bf31270a92e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf270ffee-b3", "ovs_interfaceid": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.127499] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 42%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.215958] env[68569]: DEBUG nova.scheduler.client.report [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.414625] env[68569]: DEBUG oslo_concurrency.lockutils [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] Releasing lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.414998] env[68569]: DEBUG nova.compute.manager [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 889.415215] env[68569]: DEBUG nova.compute.manager [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing instance network info cache due to event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 889.415472] env[68569]: DEBUG oslo_concurrency.lockutils [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] Acquiring lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.415673] env[68569]: DEBUG oslo_concurrency.lockutils [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.415871] env[68569]: DEBUG nova.network.neutron [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 889.522649] env[68569]: DEBUG nova.compute.manager [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 889.522895] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 889.523823] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3adc2ab-4f6c-4540-9433-3127a1176792 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.532663] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.532999] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-103e02b2-fc15-4576-a4aa-c28463de00f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.540385] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 889.540385] env[68569]: value = "task-3167176" [ 889.540385] env[68569]: _type = "Task" [ 889.540385] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.550149] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.599551] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] Releasing lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.599897] env[68569]: DEBUG nova.compute.manager [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 889.600302] env[68569]: DEBUG nova.compute.manager [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing instance network info cache due to event network-changed-471748e7-587f-46c4-b8fd-1b7c1700ca69. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 889.600505] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] Acquiring lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.628344] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 54%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.721743] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.721876] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 889.724683] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 43.574s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.050205] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.131951] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 67%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.170851] env[68569]: DEBUG nova.network.neutron [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updated VIF entry in instance network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 890.170851] env[68569]: DEBUG nova.network.neutron [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.229207] env[68569]: DEBUG nova.compute.utils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 890.240161] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 890.240359] env[68569]: DEBUG nova.network.neutron [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.288991] env[68569]: DEBUG nova.policy [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65e5bd07f5f046a8977c954cd1ab11e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58942a8ed4f742328cf9a9ded530b266', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 890.551022] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.629272] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 81%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.673736] env[68569]: DEBUG oslo_concurrency.lockutils [req-b1dc82ea-50c5-45f5-9124-2fc24101e0cb req-6d981b25-8531-40b4-bc53-1bdbad7d0291 service nova] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.674281] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] Acquired lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.674556] env[68569]: DEBUG nova.network.neutron [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Refreshing network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.678937] env[68569]: DEBUG nova.network.neutron [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Successfully created port: b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.741232] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 890.778554] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fb97d2dd-d42a-42e8-9a36-5c913a58b891 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.778927] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance de2b0206-0c73-4275-89ff-37199520dd71 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.779039] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.779172] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 398dd3c7-c630-4a29-b204-80f6fb394ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.779271] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance b40c9dec-cebc-4d23-8df4-96e804333706 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.779443] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fd803a5e-8dbd-449e-b45d-1e6410a286e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.779583] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 53cc8dbd-c163-403a-9286-e1f8ad939f94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.779699] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance b770fbd1-579a-4e3e-a5c9-9f030695f057 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.779830] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ab021831-2cc3-4457-aa55-b55036c2a423 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.779940] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 123a6895-af16-493a-afce-7ae6c2137422 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.780171] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 060fc4c8-b173-4fc4-8232-e13e3eac9dc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.780171] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a63b06a1-c24e-4013-a1f4-b227732a1e05 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 890.780887] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6824efd5-427b-420d-83d5-a1d5acd94bf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.780887] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance f84204a9-aeea-498e-9682-298e581b34e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.780887] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.780887] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 61aa0997-ffa6-4551-bdaa-132026e240f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.781028] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a591b671-ca84-47b5-9831-63478d55fd07 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.781028] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.052203] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167176, 'name': PowerOffVM_Task, 'duration_secs': 1.278067} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.052203] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.052203] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.052203] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d350dec-4302-4d96-a585-afbb73e4f7ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.129763] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 97%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.141974] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.142240] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.142428] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Deleting the datastore file [datastore1] f84204a9-aeea-498e-9682-298e581b34e3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.142733] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c14d5685-b37b-4aa1-81f6-328411b58aad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.149443] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for the task: (returnval){ [ 891.149443] env[68569]: value = "task-3167178" [ 891.149443] env[68569]: _type = "Task" [ 891.149443] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.157856] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.284146] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 559a1eff-8892-4dda-a540-4a053ae0ef2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.400572] env[68569]: DEBUG nova.network.neutron [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updated VIF entry in instance network info cache for port 471748e7-587f-46c4-b8fd-1b7c1700ca69. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.401091] env[68569]: DEBUG nova.network.neutron [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [{"id": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "address": "fa:16:3e:12:4c:d5", "network": {"id": "675522f4-7e1e-4b34-b8d4-6fad48ba3550", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1678959422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3098015751d54ac28e7171fc2948da9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap471748e7-58", "ovs_interfaceid": "471748e7-587f-46c4-b8fd-1b7c1700ca69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.629589] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.657849] env[68569]: DEBUG oslo_vmware.api [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Task: {'id': task-3167178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332145} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.658137] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.658368] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 891.658555] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.658763] env[68569]: INFO nova.compute.manager [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Took 2.14 seconds to destroy the instance on the hypervisor. [ 891.659053] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 891.659262] env[68569]: DEBUG nova.compute.manager [-] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 891.659341] env[68569]: DEBUG nova.network.neutron [-] [instance: f84204a9-aeea-498e-9682-298e581b34e3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.757783] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 891.784803] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.785068] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.785228] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.785405] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.785590] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.785783] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.785957] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.786129] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 891.786296] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.786458] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.786628] env[68569]: DEBUG nova.virt.hardware [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.787370] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 0943cfd5-33fb-4b02-9e4d-93f18385bdae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.789096] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98213af4-19cc-4d74-bc55-a660ea4d78cc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.797819] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468870e7-4bb7-4a44-a2c5-fe8528040deb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.904199] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e0bd41c-9c2d-4ead-9eda-59456bd4e23a req-138f70b3-25b2-44e1-8d82-59abb410d9a8 service nova] Releasing lock "refresh_cache-f84204a9-aeea-498e-9682-298e581b34e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.921268] env[68569]: DEBUG nova.compute.manager [req-af404b21-3e89-48ec-869c-ce8f3261ac61 req-a98d3889-c859-4ab2-91ae-0837c7a8dba7 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Received event network-vif-deleted-471748e7-587f-46c4-b8fd-1b7c1700ca69 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 891.921525] env[68569]: INFO nova.compute.manager [req-af404b21-3e89-48ec-869c-ce8f3261ac61 req-a98d3889-c859-4ab2-91ae-0837c7a8dba7 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Neutron deleted interface 471748e7-587f-46c4-b8fd-1b7c1700ca69; detaching it from the instance and deleting it from the info cache [ 891.921662] env[68569]: DEBUG nova.network.neutron [req-af404b21-3e89-48ec-869c-ce8f3261ac61 req-a98d3889-c859-4ab2-91ae-0837c7a8dba7 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.136021] env[68569]: DEBUG 
oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167175, 'name': RelocateVM_Task, 'duration_secs': 3.245313} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.136021] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 892.136021] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633587', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'name': 'volume-e0560587-266e-42c8-ae9a-a3f92ec6742b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a591b671-ca84-47b5-9831-63478d55fd07', 'attached_at': '', 'detached_at': '', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'serial': 'e0560587-266e-42c8-ae9a-a3f92ec6742b'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 892.136021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41cf114b-1782-4ecf-8c86-058491cd2d32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.152152] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2295d90e-c52e-4293-b585-f429410370d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.175982] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] volume-e0560587-266e-42c8-ae9a-a3f92ec6742b/volume-e0560587-266e-42c8-ae9a-a3f92ec6742b.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.176313] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eecd82c7-33f3-4bbb-9034-df80ce53bf51 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.196777] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 892.196777] env[68569]: value = "task-3167179" [ 892.196777] env[68569]: _type = "Task" [ 892.196777] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.204533] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167179, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.245677] env[68569]: DEBUG nova.compute.manager [req-d085e8f1-ad8a-45f5-84a3-eb2e97e832a5 req-bfaa05f0-bfe6-4bdd-abbd-20122a2cd945 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Received event network-vif-plugged-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 892.245940] env[68569]: DEBUG oslo_concurrency.lockutils [req-d085e8f1-ad8a-45f5-84a3-eb2e97e832a5 req-bfaa05f0-bfe6-4bdd-abbd-20122a2cd945 service nova] Acquiring lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.246143] env[68569]: DEBUG oslo_concurrency.lockutils [req-d085e8f1-ad8a-45f5-84a3-eb2e97e832a5 req-bfaa05f0-bfe6-4bdd-abbd-20122a2cd945 service nova] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.246309] env[68569]: DEBUG oslo_concurrency.lockutils [req-d085e8f1-ad8a-45f5-84a3-eb2e97e832a5 req-bfaa05f0-bfe6-4bdd-abbd-20122a2cd945 service nova] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.246487] env[68569]: DEBUG nova.compute.manager [req-d085e8f1-ad8a-45f5-84a3-eb2e97e832a5 req-bfaa05f0-bfe6-4bdd-abbd-20122a2cd945 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] No waiting events found dispatching network-vif-plugged-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.246651] env[68569]: WARNING nova.compute.manager [req-d085e8f1-ad8a-45f5-84a3-eb2e97e832a5 req-bfaa05f0-bfe6-4bdd-abbd-20122a2cd945 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Received unexpected event network-vif-plugged-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 for instance with vm_state building and task_state spawning. [ 892.294110] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.344222] env[68569]: DEBUG nova.network.neutron [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Successfully updated port: b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.404110] env[68569]: DEBUG nova.network.neutron [-] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.424163] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa6d13b2-97a8-4c72-99e8-392df24cc809 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.433793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592b2344-a5a2-4e55-820b-9b1128ec21bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.469028] env[68569]: DEBUG nova.compute.manager [req-af404b21-3e89-48ec-869c-ce8f3261ac61 req-a98d3889-c859-4ab2-91ae-0837c7a8dba7 service nova] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Detach interface failed, port_id=471748e7-587f-46c4-b8fd-1b7c1700ca69, reason: Instance f84204a9-aeea-498e-9682-298e581b34e3 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 892.706802] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167179, 'name': ReconfigVM_Task, 'duration_secs': 0.266154} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.707101] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Reconfigured VM instance instance-00000042 to attach disk [datastore2] volume-e0560587-266e-42c8-ae9a-a3f92ec6742b/volume-e0560587-266e-42c8-ae9a-a3f92ec6742b.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.711673] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b08bf800-90f8-4275-bbbd-a52a615dc9e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.726840] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 892.726840] env[68569]: value = "task-3167180" [ 892.726840] env[68569]: _type = "Task" [ 892.726840] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.734743] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167180, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.797445] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance b91304c7-e74d-402b-865b-150e0057c895 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.847023] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.847163] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquired lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.847364] env[68569]: DEBUG nova.network.neutron [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.907532] env[68569]: INFO nova.compute.manager [-] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Took 1.25 seconds to deallocate network for instance. [ 893.237217] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167180, 'name': ReconfigVM_Task, 'duration_secs': 0.123978} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.237558] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633587', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'name': 'volume-e0560587-266e-42c8-ae9a-a3f92ec6742b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a591b671-ca84-47b5-9831-63478d55fd07', 'attached_at': '', 'detached_at': '', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'serial': 'e0560587-266e-42c8-ae9a-a3f92ec6742b'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 893.238119] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27c88370-84f9-4ffa-971b-839325390adc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.244220] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 893.244220] env[68569]: value = "task-3167181" [ 893.244220] env[68569]: _type = "Task" [ 893.244220] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.253336] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167181, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.300931] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 3551627b-9c90-43ea-bae7-d186eaa53c6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 893.381897] env[68569]: DEBUG nova.network.neutron [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.413887] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.500763] env[68569]: DEBUG nova.network.neutron [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updating instance_info_cache with network_info: [{"id": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "address": "fa:16:3e:61:3a:58", "network": {"id": "332fd870-afb5-4a9f-b1c9-d87cb33d29ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1915965597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58942a8ed4f742328cf9a9ded530b266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7baf7ab-5d", "ovs_interfaceid": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.754874] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167181, 'name': Rename_Task, 'duration_secs': 0.129475} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.755263] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.755582] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c78d77f6-ded7-40ae-9336-0cc9eec90a8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.762429] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 893.762429] env[68569]: value = "task-3167182" [ 893.762429] env[68569]: _type = "Task" [ 893.762429] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.772250] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.804338] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c0211ed8-5995-48f4-b339-99bd4c93254c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 894.003879] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Releasing lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.004411] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Instance network_info: |[{"id": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "address": "fa:16:3e:61:3a:58", "network": {"id": "332fd870-afb5-4a9f-b1c9-d87cb33d29ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1915965597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58942a8ed4f742328cf9a9ded530b266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7baf7ab-5d", "ovs_interfaceid": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 894.004772] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:3a:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08e9585e-6186-4788-9fd9-24174ce45a6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.012720] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Creating folder: Project (58942a8ed4f742328cf9a9ded530b266). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.013011] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d63e12e2-ea65-4df5-94b1-12b4f2d1a764 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.024503] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Created folder: Project (58942a8ed4f742328cf9a9ded530b266) in parent group-v633430. [ 894.024717] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Creating folder: Instances. Parent ref: group-v633623. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.024959] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0721f908-4204-490a-8b1c-95e279302747 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.034624] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Created folder: Instances in parent group-v633623. [ 894.034852] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.035046] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.035245] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21cca165-ade6-4476-bcb2-f448295129fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.053517] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.053517] env[68569]: value = "task-3167185" [ 894.053517] env[68569]: _type = "Task" [ 894.053517] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.060582] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167185, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.272337] env[68569]: DEBUG oslo_vmware.api [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167182, 'name': PowerOnVM_Task, 'duration_secs': 0.455556} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.272646] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.272878] env[68569]: INFO nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Took 7.43 seconds to spawn the instance on the hypervisor. [ 894.273106] env[68569]: DEBUG nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.273891] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcdd85c4-7280-4334-860d-cead5c7a3031 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.305284] env[68569]: DEBUG nova.compute.manager [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Received event network-changed-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 894.305549] env[68569]: DEBUG nova.compute.manager [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Refreshing instance network info cache due to event network-changed-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 894.305775] env[68569]: DEBUG oslo_concurrency.lockutils [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] Acquiring lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.305923] env[68569]: DEBUG oslo_concurrency.lockutils [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] Acquired lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.306191] env[68569]: DEBUG nova.network.neutron [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Refreshing network info cache for port b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.308935] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7696390d-a097-4b6d-827d-92f3165a4188 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 894.563476] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167185, 'name': CreateVM_Task, 'duration_secs': 0.305124} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.563692] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.564395] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.564611] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.564929] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 894.565195] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0aad2563-d35c-4114-88ea-56c259e11c48 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.569353] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 894.569353] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e3ce6f-d5c5-a74b-3615-19666970658d" [ 894.569353] env[68569]: _type = "Task" [ 894.569353] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.576736] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e3ce6f-d5c5-a74b-3615-19666970658d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.792730] env[68569]: INFO nova.compute.manager [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Took 57.99 seconds to build instance. [ 894.813031] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c0ea0ef8-93c2-416a-8caa-a51f7a39627e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 895.021594] env[68569]: DEBUG nova.network.neutron [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updated VIF entry in instance network info cache for port b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.021894] env[68569]: DEBUG nova.network.neutron [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updating instance_info_cache with network_info: [{"id": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "address": "fa:16:3e:61:3a:58", "network": {"id": "332fd870-afb5-4a9f-b1c9-d87cb33d29ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1915965597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58942a8ed4f742328cf9a9ded530b266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7baf7ab-5d", "ovs_interfaceid": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.081492] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e3ce6f-d5c5-a74b-3615-19666970658d, 'name': SearchDatastore_Task, 'duration_secs': 0.00963} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.081680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.081754] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.082056] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.082211] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.082392] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.082678] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b36a84c4-1793-422b-acfb-990a2ca8a8fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.091232] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.091405] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.092112] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-343c2b11-114c-4ff2-a971-8f5f051996f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.098045] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 895.098045] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a411bb-1271-bd75-bf6c-d1b24f572943" [ 895.098045] env[68569]: _type = "Task" [ 895.098045] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.105265] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a411bb-1271-bd75-bf6c-d1b24f572943, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.295152] env[68569]: DEBUG oslo_concurrency.lockutils [None req-63156fc3-7da5-4206-8160-847ce26dce1e tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "a591b671-ca84-47b5-9831-63478d55fd07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.274s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.315871] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 2cf8803a-8078-4832-a736-330d6bcde6ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 895.316249] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 895.316448] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 895.526135] env[68569]: DEBUG oslo_concurrency.lockutils [req-59687211-8a79-4446-ae9d-9cdc8625947f req-3ce67d61-1d2a-42a5-8b01-cf0fe1506a3c service nova] Releasing lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.591675] env[68569]: DEBUG nova.compute.manager [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Received event network-changed-f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 895.591896] env[68569]: DEBUG nova.compute.manager [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Refreshing instance network info cache due to event network-changed-f270ffee-b36c-4ea6-bfca-2f839c671e61. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 895.592135] env[68569]: DEBUG oslo_concurrency.lockutils [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] Acquiring lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.592283] env[68569]: DEBUG oslo_concurrency.lockutils [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] Acquired lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.592485] env[68569]: DEBUG nova.network.neutron [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Refreshing network info cache for port f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.612897] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a411bb-1271-bd75-bf6c-d1b24f572943, 'name': SearchDatastore_Task, 'duration_secs': 0.008726} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.614337] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2dfe16-78bd-42e8-98e5-3075cd14eda3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.619582] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 895.619582] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e9eca8-f68e-ef0e-d604-a19a80654a36" [ 895.619582] env[68569]: _type = "Task" [ 895.619582] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.629171] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e9eca8-f68e-ef0e-d604-a19a80654a36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.767139] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d212dd-bbe0-467f-875a-0bebde881af0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.776345] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024ebd0a-0c5b-491a-b778-5b27977ab713 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.805417] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 895.808536] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f09f63-ff01-4805-b53e-eb33a50fac64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.815990] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d1ffcf-efc4-452d-bb92-f0ed823de530 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.834021] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.132358] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e9eca8-f68e-ef0e-d604-a19a80654a36, 'name': SearchDatastore_Task, 'duration_secs': 0.010631} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.132760] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.133094] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] db75de86-9dda-42b2-9e7a-55e2ba5adad1/db75de86-9dda-42b2-9e7a-55e2ba5adad1.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.133379] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5da28963-8325-4d1e-8fa2-148c8138e57c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.139878] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 896.139878] env[68569]: value = "task-3167186" [ 896.139878] env[68569]: _type = "Task" [ 896.139878] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.149466] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.322446] env[68569]: DEBUG nova.network.neutron [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updated VIF entry in instance network info cache for port f270ffee-b36c-4ea6-bfca-2f839c671e61. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.322873] env[68569]: DEBUG nova.network.neutron [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updating instance_info_cache with network_info: [{"id": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "address": "fa:16:3e:44:18:35", "network": {"id": "b2cbc274-881a-457c-8cf9-4bcf861f755a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-438306757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec7a9d5c742c4b91891bf31270a92e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf270ffee-b3", "ovs_interfaceid": "f270ffee-b36c-4ea6-bfca-2f839c671e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.329384] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.333385] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.651451] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457293} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.651712] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] db75de86-9dda-42b2-9e7a-55e2ba5adad1/db75de86-9dda-42b2-9e7a-55e2ba5adad1.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.651915] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.652175] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2285ec7c-9210-4fec-bc88-7eb6887f4795 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.658299] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 896.658299] env[68569]: value = "task-3167187" [ 896.658299] env[68569]: _type = "Task" [ 896.658299] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.665211] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167187, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.826285] env[68569]: DEBUG oslo_concurrency.lockutils [req-c20d75d8-66fc-4c12-a751-1dff4cf43fe3 req-e6056b17-53af-48ad-ade6-0bb2dbca4af1 service nova] Releasing lock "refresh_cache-a591b671-ca84-47b5-9831-63478d55fd07" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.838486] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 896.838749] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.114s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.839034] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.978s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.840530] env[68569]: INFO nova.compute.claims [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.843173] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.843329] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Cleaning up deleted instances {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11841}} [ 897.171774] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091136} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.172186] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.173517] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d17e810-c767-4da2-878c-524879f6a660 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.209755] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] db75de86-9dda-42b2-9e7a-55e2ba5adad1/db75de86-9dda-42b2-9e7a-55e2ba5adad1.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.210141] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8ce724f-7bf8-4b12-84b7-b42c48cdf610 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.242319] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 897.242319] env[68569]: value = "task-3167188" [ 897.242319] env[68569]: _type = "Task" [ 897.242319] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.253850] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167188, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.360063] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] There are 49 instances to clean {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11850}} [ 897.360380] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 60aa85f3-edac-40e0-ad31-a8f93219e380] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 897.705543] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f914b7-a6cb-4440-906a-d1565c42cac0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.713344] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d26db3-9e7c-4195-930f-eda2441d651d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.409820] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 57a63648-83e9-4f23-aebc-050e58149ce2] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 898.416608] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7880b7ab-2a5d-4f0f-b02d-db950e981e19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.423864] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167188, 'name': ReconfigVM_Task, 'duration_secs': 0.281863} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.425469] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Reconfigured VM instance instance-00000043 to attach disk [datastore2] db75de86-9dda-42b2-9e7a-55e2ba5adad1/db75de86-9dda-42b2-9e7a-55e2ba5adad1.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.426049] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d338615c-24b8-409a-8165-2fe4e379ade8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.428360] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b24a40-53d3-4509-b63b-e62d30a63b40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.441580] env[68569]: DEBUG nova.compute.provider_tree [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.444105] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 898.444105] env[68569]: value = "task-3167189" [ 898.444105] env[68569]: _type = "Task" [ 898.444105] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.913635] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 9eafa273-097b-48ac-ae5f-4f7a469ac861] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 898.945477] env[68569]: DEBUG nova.scheduler.client.report [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.958978] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167189, 'name': Rename_Task, 'duration_secs': 0.160259} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.958978] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.958978] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d6b8793-aebf-42e8-ab9c-be07f47c65f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.965195] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 898.965195] env[68569]: value = "task-3167190" [ 898.965195] env[68569]: _type = "Task" [ 898.965195] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.973489] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.417314] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: cc5139e1-4601-4966-9224-25b8223b8a57] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 899.454243] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.454822] env[68569]: DEBUG nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 899.457291] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.236s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.457485] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.459492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.859s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.461028] env[68569]: INFO nova.compute.claims [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.474490] env[68569]: DEBUG oslo_vmware.api [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167190, 'name': PowerOnVM_Task, 'duration_secs': 0.475559} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.474783] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.474973] env[68569]: INFO nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Took 7.72 seconds to spawn the instance on the hypervisor. 
[ 899.475184] env[68569]: DEBUG nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.476292] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fb3bea-3bf5-427d-81e6-13ce4f1664e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.490888] env[68569]: INFO nova.scheduler.client.report [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Deleted allocations for instance de2b0206-0c73-4275-89ff-37199520dd71 [ 899.921220] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 50abc994-682a-40d6-ae77-601839b98793] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 899.968775] env[68569]: DEBUG nova.compute.utils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 899.969040] env[68569]: DEBUG nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 899.997707] env[68569]: INFO nova.compute.manager [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Took 60.00 seconds to build instance. 
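The lockutils entries in this section report each named lock (for example "compute_resources" and the per-instance "refresh_cache-..." locks) with a "waited" time, the interval spent blocking before acquisition, and a "held" time, the interval between acquire and release. As an illustration only, the following stdlib Python sketch reproduces that accounting with a hypothetical timed_lock helper; it is not the oslo.concurrency lockutils code.

import contextlib
import threading
import time

_locks = {}                      # one named lock per resource, e.g. "compute_resources"
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, owner):
    """Acquire the named lock, reporting waited/held times like lockutils does."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "demo.instance_claim"):
        time.sleep(0.05)         # pretend to claim resources while holding the lock

The long "waited" values in the surrounding entries (tens of seconds for "compute_resources") reflect many build requests serializing on the same resource-tracker lock, while the near-zero "held" times show each critical section itself is short.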
[ 900.001032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6bca07a-9fc3-4e54-a1ea-6e2dd4497a5b tempest-ListServerFiltersTestJSON-456276387 tempest-ListServerFiltersTestJSON-456276387-project-member] Lock "de2b0206-0c73-4275-89ff-37199520dd71" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.288s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.424849] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 1d1bc559-54c1-4ee5-ba78-bc4fbdca2df6] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 900.429533] env[68569]: DEBUG nova.compute.manager [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Received event network-changed-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 900.429643] env[68569]: DEBUG nova.compute.manager [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Refreshing instance network info cache due to event network-changed-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 900.429838] env[68569]: DEBUG oslo_concurrency.lockutils [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] Acquiring lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.429972] env[68569]: DEBUG oslo_concurrency.lockutils [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] Acquired lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.430146] env[68569]: DEBUG nova.network.neutron [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Refreshing network info cache for port b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.473440] env[68569]: DEBUG nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 900.501169] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f4de5d32-a0d4-4bf6-881c-56cf5db065da tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.762s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.819222] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2964e854-2902-430e-b769-95a6f1bc600b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.827044] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c186aca2-7b15-4682-bdc5-6c5763067a01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.856054] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3466b83-daec-459c-9598-29b573edede6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.863087] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd2fcf7-6574-418d-9815-ead7ded93477 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.877614] env[68569]: DEBUG nova.compute.provider_tree [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.932983] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 492c0fa1-f821-496a-86c2-f7686479a733] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 901.170293] env[68569]: DEBUG nova.network.neutron [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updated VIF entry in instance network info cache for port b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.170714] env[68569]: DEBUG nova.network.neutron [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updating instance_info_cache with network_info: [{"id": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "address": "fa:16:3e:61:3a:58", "network": {"id": "332fd870-afb5-4a9f-b1c9-d87cb33d29ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1915965597-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58942a8ed4f742328cf9a9ded530b266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7baf7ab-5d", "ovs_interfaceid": "b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.380788] env[68569]: DEBUG nova.scheduler.client.report [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.438961] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 98efdafe-e02b-46ca-a701-b70042513128] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 901.484454] env[68569]: DEBUG nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 901.510621] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.510903] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.511035] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.511223] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.511368] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.511512] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.511713] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.511866] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.512065] env[68569]: DEBUG 
nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.512229] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.512394] env[68569]: DEBUG nova.virt.hardware [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.513252] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0524c905-ddda-479a-9b89-f5f02855c1a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.522734] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a089bea-bc24-4f93-9e57-55135cf82118 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.537338] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.542734] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Creating folder: Project (bbff40c5e9fc4ff696a216efa3020131). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.543050] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96d8040b-79be-4549-a352-b1411f41479b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.552493] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Created folder: Project (bbff40c5e9fc4ff696a216efa3020131) in parent group-v633430. [ 901.552680] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Creating folder: Instances. Parent ref: group-v633626. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.552932] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f2c540-5844-4669-baf6-dee422a8967e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.561673] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Created folder: Instances in parent group-v633626. [ 901.561897] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 901.562091] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.562292] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1c840cc-27b8-49b4-b2ec-345afbd0c770 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.577681] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.577681] env[68569]: value = "task-3167193" [ 901.577681] env[68569]: _type = "Task" [ 901.577681] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.584721] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167193, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.673625] env[68569]: DEBUG oslo_concurrency.lockutils [req-71350835-c5b4-492f-a064-2ca1086870b0 req-89b53f08-e41a-4182-8d18-3e09aa867024 service nova] Releasing lock "refresh_cache-db75de86-9dda-42b2-9e7a-55e2ba5adad1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.888746] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.889436] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.892183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.186s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.893590] env[68569]: INFO nova.compute.claims [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.942518] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: ba22fe4b-2238-4748-8af5-8fc6bfe3b3ae] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 902.091050] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167193, 'name': CreateVM_Task, 'duration_secs': 0.283779} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.091050] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.091667] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.091994] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.092473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 902.092879] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54113ed7-144f-4575-9939-ea01afeb742b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.098665] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 902.098665] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52df9c01-422e-c679-2af2-d671f92a1573" [ 902.098665] env[68569]: _type = "Task" [ 902.098665] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.112252] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52df9c01-422e-c679-2af2-d671f92a1573, 'name': SearchDatastore_Task, 'duration_secs': 0.00956} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.112652] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.112994] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.113371] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.113631] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.113934] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.114300] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b6f305c-7a51-42ea-9a53-8d169ddef2ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.122055] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.122215] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.122910] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39a4bb8f-e80f-4ab3-b126-9c98eec5e822 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.127748] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 902.127748] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5211fc21-8a20-03ee-398b-d11c1d47933e" [ 902.127748] env[68569]: _type = "Task" [ 902.127748] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.136353] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5211fc21-8a20-03ee-398b-d11c1d47933e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.398482] env[68569]: DEBUG nova.compute.utils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 902.401907] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 902.402146] env[68569]: DEBUG nova.network.neutron [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.446043] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c634f7eb-2f71-473d-8f90-71d74edffecb] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 902.455739] env[68569]: DEBUG nova.policy [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a920d87217ea4a01a1d3f7e49e8ca69b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8b2c0385c3b4eb59f5bfcb75a3ed1b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 902.638350] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5211fc21-8a20-03ee-398b-d11c1d47933e, 'name': SearchDatastore_Task, 'duration_secs': 0.008318} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.639060] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-401d24d2-06c4-4bc4-9010-003f2308ae09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.643995] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 902.643995] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f0a77d-216f-5008-1dbb-1fe59d65d3b3" [ 902.643995] env[68569]: _type = "Task" [ 902.643995] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.651121] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f0a77d-216f-5008-1dbb-1fe59d65d3b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.720996] env[68569]: DEBUG nova.network.neutron [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Successfully created port: 6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.902355] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.950644] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 64146253-16ab-4d95-83c9-31b74014a040] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 903.157478] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f0a77d-216f-5008-1dbb-1fe59d65d3b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009471} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.157745] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.157996] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.158317] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cfad955-9874-4b66-a0c0-00988833ee4c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.167224] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 903.167224] env[68569]: value = "task-3167194" [ 903.167224] env[68569]: _type = "Task" [ 903.167224] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.176505] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.248258] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be4124a-2872-4d15-a83a-431fffa91aa7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.255437] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90917c06-a065-4d65-a829-247e8c0ecf26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.285662] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8b608d-aecf-4422-95b0-97e444f99a4a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.292425] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6d0fc6-a0a1-43da-b028-c209fde016e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.305820] env[68569]: DEBUG nova.compute.provider_tree [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.459770] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 9aa0dbe3-e31c-4a14-a4e2-0cf781aa99c0] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 903.677524] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167194, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.810032] env[68569]: DEBUG nova.scheduler.client.report [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.917998] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.947666] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 903.948059] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.948614] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 903.948614] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.948614] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Image pref 0:0:0 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 903.948778] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 903.949018] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 903.949187] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 903.949359] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 903.949522] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 903.949698] env[68569]: DEBUG nova.virt.hardware [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 903.950752] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f90b1a6-b92e-4da9-a071-c3ebfbd01622 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.960148] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731df1dd-4e32-42ae-9572-ec5fd97a0259 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.964414] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 16b6fafe-524d-482f-961b-10e3601ac4c2] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 904.121489] env[68569]: DEBUG nova.compute.manager [req-13f5f8f9-fc97-4896-b1dc-dde0eb0964ed req-697ee5a7-0332-4fed-a078-cc6a5e38c4f7 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Received event network-vif-plugged-6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 904.121489] env[68569]: DEBUG oslo_concurrency.lockutils 
[req-13f5f8f9-fc97-4896-b1dc-dde0eb0964ed req-697ee5a7-0332-4fed-a078-cc6a5e38c4f7 service nova] Acquiring lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.121739] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f5f8f9-fc97-4896-b1dc-dde0eb0964ed req-697ee5a7-0332-4fed-a078-cc6a5e38c4f7 service nova] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.121797] env[68569]: DEBUG oslo_concurrency.lockutils [req-13f5f8f9-fc97-4896-b1dc-dde0eb0964ed req-697ee5a7-0332-4fed-a078-cc6a5e38c4f7 service nova] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.121923] env[68569]: DEBUG nova.compute.manager [req-13f5f8f9-fc97-4896-b1dc-dde0eb0964ed req-697ee5a7-0332-4fed-a078-cc6a5e38c4f7 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] No waiting events found dispatching network-vif-plugged-6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.122106] env[68569]: WARNING nova.compute.manager [req-13f5f8f9-fc97-4896-b1dc-dde0eb0964ed req-697ee5a7-0332-4fed-a078-cc6a5e38c4f7 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Received unexpected event network-vif-plugged-6454ea52-76d3-478e-b299-b8484a3f5300 for instance with vm_state building and task_state spawning. [ 904.180580] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.807586} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.180834] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 904.181048] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.181300] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6c3ba7a-0d98-4f8e-87d2-74fb34221e29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.187903] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 904.187903] env[68569]: value = "task-3167195" [ 904.187903] env[68569]: _type = "Task" [ 904.187903] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.195154] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.215634] env[68569]: DEBUG nova.network.neutron [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Successfully updated port: 6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.315096] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.315649] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 904.318232] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 45.274s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.318415] env[68569]: DEBUG nova.objects.instance [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 904.469543] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 0c4d4d93-89bf-4164-973b-af48278a3915] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 904.697358] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063043} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.697612] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 904.698368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10659286-e0b9-49c5-a960-a200d17bf628 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.716993] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.717226] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca80f109-8ca7-471a-9ded-cc2e6b89f56a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.730927] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "refresh_cache-0943cfd5-33fb-4b02-9e4d-93f18385bdae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.731089] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquired lock "refresh_cache-0943cfd5-33fb-4b02-9e4d-93f18385bdae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.731217] env[68569]: DEBUG nova.network.neutron [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.736997] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 904.736997] env[68569]: value = "task-3167196" [ 904.736997] env[68569]: _type = "Task" [ 904.736997] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.745309] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.826979] env[68569]: DEBUG nova.compute.utils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 904.828727] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 904.828954] env[68569]: DEBUG nova.network.neutron [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 904.868502] env[68569]: DEBUG nova.policy [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c9d77d804154a199681132cb34bf626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8bd8ff748a34e7a83ec0edbfa148aac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 904.973143] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 77b9756e-2299-47e2-a6d8-e8026e33a3de] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 905.151060] env[68569]: DEBUG nova.network.neutron [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Successfully created port: 948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.247672] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167196, 'name': ReconfigVM_Task, 'duration_secs': 0.302331} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.247672] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.248302] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c99143f0-1bdc-4f30-be74-961d533b05da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.255303] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 905.255303] env[68569]: value = "task-3167197" [ 905.255303] env[68569]: _type = "Task" [ 905.255303] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.264366] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167197, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.271157] env[68569]: DEBUG nova.network.neutron [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.327982] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bbf81eb7-c9e2-43c6-8162-b8f41069b9d2 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.328945] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.447s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.332302] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.003s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.336368] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.197s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.336835] env[68569]: INFO nova.compute.claims [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.341600] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 905.383038] env[68569]: INFO nova.scheduler.client.report [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted allocations for instance ab021831-2cc3-4457-aa55-b55036c2a423 [ 905.431774] env[68569]: DEBUG nova.network.neutron [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Updating instance_info_cache with network_info: [{"id": "6454ea52-76d3-478e-b299-b8484a3f5300", "address": "fa:16:3e:43:0f:aa", "network": {"id": "57e796fc-60cd-41b4-8fc3-74d80ef1a016", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1529692511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8b2c0385c3b4eb59f5bfcb75a3ed1b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6454ea52-76", "ovs_interfaceid": "6454ea52-76d3-478e-b299-b8484a3f5300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.480674] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: eec09a1c-e8b2-4b6a-9545-e190e1f965d1] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 905.765486] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167197, 'name': Rename_Task, 'duration_secs': 0.135948} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.765799] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.766102] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4db1ce9c-ae3a-430a-aff5-dd102e7c22c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.772578] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 905.772578] env[68569]: value = "task-3167198" [ 905.772578] env[68569]: _type = "Task" [ 905.772578] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.779895] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167198, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.890756] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ccd0fc02-1a9e-433a-a8da-4ced9b9a4d31 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "ab021831-2cc3-4457-aa55-b55036c2a423" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.404s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.933956] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Releasing lock "refresh_cache-0943cfd5-33fb-4b02-9e4d-93f18385bdae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.934337] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Instance network_info: |[{"id": "6454ea52-76d3-478e-b299-b8484a3f5300", "address": "fa:16:3e:43:0f:aa", "network": {"id": "57e796fc-60cd-41b4-8fc3-74d80ef1a016", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1529692511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8b2c0385c3b4eb59f5bfcb75a3ed1b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", 
"external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6454ea52-76", "ovs_interfaceid": "6454ea52-76d3-478e-b299-b8484a3f5300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 905.934756] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:0f:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6454ea52-76d3-478e-b299-b8484a3f5300', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.942101] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Creating folder: Project (d8b2c0385c3b4eb59f5bfcb75a3ed1b5). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.942676] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4a1d543-4371-4094-be89-0b8f890d1272 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.953234] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Created folder: Project (d8b2c0385c3b4eb59f5bfcb75a3ed1b5) in parent group-v633430. [ 905.953489] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Creating folder: Instances. Parent ref: group-v633629. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.953727] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a4387f3-a5ca-435e-b26b-8e7095af23ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.964296] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Created folder: Instances in parent group-v633629. [ 905.964558] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.964774] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.964991] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39a20b10-c763-4bd9-b138-f45c1f9119d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.983910] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 2dcadbe8-fe23-4ee5-bf8a-5aa3cde1425c] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 905.985727] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.985727] env[68569]: value = "task-3167201" [ 905.985727] env[68569]: _type = "Task" [ 905.985727] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.993783] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167201, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.152190] env[68569]: DEBUG nova.compute.manager [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Received event network-changed-6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 906.152407] env[68569]: DEBUG nova.compute.manager [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Refreshing instance network info cache due to event network-changed-6454ea52-76d3-478e-b299-b8484a3f5300. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 906.152632] env[68569]: DEBUG oslo_concurrency.lockutils [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] Acquiring lock "refresh_cache-0943cfd5-33fb-4b02-9e4d-93f18385bdae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.152792] env[68569]: DEBUG oslo_concurrency.lockutils [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] Acquired lock "refresh_cache-0943cfd5-33fb-4b02-9e4d-93f18385bdae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.152971] env[68569]: DEBUG nova.network.neutron [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Refreshing network info cache for port 6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.283488] env[68569]: DEBUG oslo_vmware.api [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167198, 'name': PowerOnVM_Task, 'duration_secs': 0.497007} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.283767] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.283974] env[68569]: INFO nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Took 4.80 seconds to spawn the instance on the hypervisor. [ 906.284171] env[68569]: DEBUG nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.284947] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc21bf7-3dcb-4123-9045-b708a9f8a9d2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.357134] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 906.386433] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 906.386683] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.386831] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 906.387563] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 
tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.387563] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 906.387563] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 906.387563] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 906.388640] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 906.388866] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 906.389108] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 906.389234] env[68569]: DEBUG nova.virt.hardware [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 906.390143] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e1f335-470f-4f62-ac26-a515f49e026c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.403160] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2934a1-9dca-4460-941e-4b9c9f883b9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.487972] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6606b921-4f3a-44f5-ae4e-c600f26876fc] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 906.499307] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167201, 'name': CreateVM_Task, 
'duration_secs': 0.304685} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.500050] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.500359] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.500516] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.500836] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.501459] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c9768a8-17a5-4a2b-a38e-65f7b8d45041 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.507838] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 906.507838] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea56b3-dab5-6422-edaa-b6927e3ae000" [ 906.507838] env[68569]: _type = "Task" [ 906.507838] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.515906] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea56b3-dab5-6422-edaa-b6927e3ae000, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.701263] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2540515c-89d8-4e1d-a78e-ec197adacb66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.706264] env[68569]: DEBUG nova.network.neutron [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Successfully updated port: 948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.717215] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37e4eb7-bb2e-4702-a50d-bc11a7e02102 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.747299] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434048ab-95f8-47d4-b79f-e68248fd2aaf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.755103] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ad99ed-15c7-42b2-bfde-f281b9172b13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.771734] env[68569]: DEBUG nova.compute.provider_tree [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.800928] env[68569]: INFO nova.compute.manager [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Took 56.95 seconds to build instance. [ 906.900547] env[68569]: DEBUG nova.network.neutron [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Updated VIF entry in instance network info cache for port 6454ea52-76d3-478e-b299-b8484a3f5300. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.900910] env[68569]: DEBUG nova.network.neutron [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Updating instance_info_cache with network_info: [{"id": "6454ea52-76d3-478e-b299-b8484a3f5300", "address": "fa:16:3e:43:0f:aa", "network": {"id": "57e796fc-60cd-41b4-8fc3-74d80ef1a016", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1529692511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8b2c0385c3b4eb59f5bfcb75a3ed1b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6454ea52-76", "ovs_interfaceid": "6454ea52-76d3-478e-b299-b8484a3f5300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.993628] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 912303de-a79d-41b0-ab44-c79e850a4dee] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 907.020487] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea56b3-dab5-6422-edaa-b6927e3ae000, 'name': SearchDatastore_Task, 'duration_secs': 0.01237} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.020487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.020487] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.020487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.020618] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.020618] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.020702] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8abf2ca5-9e39-4b90-9750-7a30cb1dcc7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.029104] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.029339] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.030057] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84c59a90-4254-4a72-910e-e1967cfc065e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.035128] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 907.035128] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f65ed6-b5b5-c8d5-bfd0-88cb346028ea" [ 907.035128] env[68569]: _type = "Task" [ 907.035128] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.043843] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f65ed6-b5b5-c8d5-bfd0-88cb346028ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.212025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.212025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.212185] env[68569]: DEBUG nova.network.neutron [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.296726] env[68569]: ERROR nova.scheduler.client.report [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [req-ba7bd1a3-cc1e-4f27-b5a5-6bc39f9f02fd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ba7bd1a3-cc1e-4f27-b5a5-6bc39f9f02fd"}]} [ 907.302430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8a481a53-05b4-44c3-8482-3bf3d1384bfb tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "559a1eff-8892-4dda-a540-4a053ae0ef2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.453s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.312251] env[68569]: DEBUG nova.scheduler.client.report [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 907.326240] env[68569]: DEBUG nova.scheduler.client.report [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 907.326542] env[68569]: DEBUG nova.compute.provider_tree [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 907.337163] env[68569]: DEBUG nova.scheduler.client.report [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 907.354988] env[68569]: DEBUG nova.scheduler.client.report [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 907.403283] env[68569]: DEBUG oslo_concurrency.lockutils [req-536439cc-5bc4-4c47-8d5c-9e3c5a777148 req-7b54a8b1-438c-4a3d-8186-e71b21d0b6f8 service nova] Releasing lock "refresh_cache-0943cfd5-33fb-4b02-9e4d-93f18385bdae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.498102] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 78f486aa-80f4-4d43-bd00-cc6206517a72] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 907.546415] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f65ed6-b5b5-c8d5-bfd0-88cb346028ea, 'name': SearchDatastore_Task, 'duration_secs': 0.008181} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.549902] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0946d8e1-ff31-4fca-92fc-fe6fa2492243 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.554489] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 907.554489] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524416e8-b808-5368-3c53-a1134eb73284" [ 907.554489] env[68569]: _type = "Task" [ 907.554489] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.563847] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524416e8-b808-5368-3c53-a1134eb73284, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.657009] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c420d864-199d-438b-95d9-f5eb3fb34bf6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.664790] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b6a7da-8a57-45bd-958a-a82b42fbc823 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.696132] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70ffe2a-a1e8-4225-849f-398914209000 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.703576] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6970dc22-d563-4949-9daf-6a597ae13b75 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.718765] env[68569]: DEBUG nova.compute.provider_tree [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 907.752264] env[68569]: DEBUG nova.network.neutron [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.933070] env[68569]: DEBUG nova.network.neutron [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Updating instance_info_cache with network_info: [{"id": "948d6af6-c562-4b44-b852-89a32cf79dad", "address": "fa:16:3e:9a:c3:6c", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948d6af6-c5", "ovs_interfaceid": "948d6af6-c562-4b44-b852-89a32cf79dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.003058] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: ec64b2fd-2409-4af1-8f51-cc0ccbba14f2] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 908.065252] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524416e8-b808-5368-3c53-a1134eb73284, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.065519] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.065775] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 0943cfd5-33fb-4b02-9e4d-93f18385bdae/0943cfd5-33fb-4b02-9e4d-93f18385bdae.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.066039] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-937effa7-1a20-4632-9a5b-fd16c389a8c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.072749] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 908.072749] env[68569]: value = "task-3167202" [ 908.072749] env[68569]: _type = "Task" [ 908.072749] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.080238] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167202, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.181589] env[68569]: DEBUG nova.compute.manager [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Received event network-vif-plugged-948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 908.181833] env[68569]: DEBUG oslo_concurrency.lockutils [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] Acquiring lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.182054] env[68569]: DEBUG oslo_concurrency.lockutils [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.182226] env[68569]: DEBUG oslo_concurrency.lockutils [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.182400] env[68569]: DEBUG nova.compute.manager [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] No waiting events found dispatching network-vif-plugged-948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 908.182533] env[68569]: WARNING nova.compute.manager [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Received unexpected event network-vif-plugged-948d6af6-c562-4b44-b852-89a32cf79dad for instance with vm_state building and task_state spawning. [ 908.182689] env[68569]: DEBUG nova.compute.manager [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Received event network-changed-948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 908.182835] env[68569]: DEBUG nova.compute.manager [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Refreshing instance network info cache due to event network-changed-948d6af6-c562-4b44-b852-89a32cf79dad. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 908.182999] env[68569]: DEBUG oslo_concurrency.lockutils [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] Acquiring lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.253262] env[68569]: DEBUG nova.scheduler.client.report [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 908.255496] env[68569]: DEBUG nova.compute.provider_tree [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 107 to 108 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 908.255496] env[68569]: DEBUG nova.compute.provider_tree [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 908.435750] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.436126] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Instance network_info: |[{"id": "948d6af6-c562-4b44-b852-89a32cf79dad", "address": "fa:16:3e:9a:c3:6c", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948d6af6-c5", "ovs_interfaceid": "948d6af6-c562-4b44-b852-89a32cf79dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 908.438786] env[68569]: DEBUG oslo_concurrency.lockutils [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] Acquired lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.438786] env[68569]: DEBUG nova.network.neutron [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Refreshing network info cache for port 948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.438786] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:c3:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '948d6af6-c562-4b44-b852-89a32cf79dad', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 908.446688] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.447603] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 908.447900] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8df05342-1298-4c9b-b1d3-c1fa8852160b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.474752] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 908.474752] env[68569]: value = "task-3167203" [ 908.474752] env[68569]: _type = "Task" [ 908.474752] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.485291] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167203, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.506729] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7c887df0-4358-46c5-9682-0d4122e96d10] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 908.544129] env[68569]: INFO nova.compute.manager [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Rebuilding instance [ 908.583422] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167202, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.592891] env[68569]: DEBUG nova.compute.manager [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 908.593844] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee42740e-01a4-4600-a87c-bbeaadbc4521 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.758923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.424s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.759481] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 908.763103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.252s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.764410] env[68569]: INFO nova.compute.claims [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.986874] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167203, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.014212] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: bda7e09b-848b-4d5d-a49d-6e0639f22f99] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 909.083685] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167202, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626651} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.086386] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 0943cfd5-33fb-4b02-9e4d-93f18385bdae/0943cfd5-33fb-4b02-9e4d-93f18385bdae.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.086610] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.086868] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a8125ce-d442-47b5-b3fa-1e858320ff32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.093832] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 909.093832] env[68569]: value = "task-3167204" [ 909.093832] env[68569]: _type = "Task" [ 909.093832] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.101189] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167204, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.163975] env[68569]: DEBUG nova.network.neutron [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Updated VIF entry in instance network info cache for port 948d6af6-c562-4b44-b852-89a32cf79dad. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.164374] env[68569]: DEBUG nova.network.neutron [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Updating instance_info_cache with network_info: [{"id": "948d6af6-c562-4b44-b852-89a32cf79dad", "address": "fa:16:3e:9a:c3:6c", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948d6af6-c5", "ovs_interfaceid": "948d6af6-c562-4b44-b852-89a32cf79dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.269314] env[68569]: DEBUG nova.compute.utils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.272600] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 909.272786] env[68569]: DEBUG nova.network.neutron [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.310837] env[68569]: DEBUG nova.policy [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.486428] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167203, 'name': CreateVM_Task, 'duration_secs': 0.568106} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.486570] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 909.487279] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.487457] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.487793] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 909.488077] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5ebf024-27c8-4e72-87f8-4a0add706f1b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.492236] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 909.492236] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52352124-37a9-6103-208f-f09ef3aa7795" [ 909.492236] env[68569]: _type = "Task" [ 909.492236] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.500592] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52352124-37a9-6103-208f-f09ef3aa7795, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.520139] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 2cde3729-1be6-42c5-891f-42a7a8bff267] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 909.598645] env[68569]: DEBUG nova.network.neutron [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Successfully created port: 5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.606063] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167204, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060886} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.606328] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.607092] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb1ae04-4ab9-4404-9a4f-77829baf5cd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.609840] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.610343] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da73d5e5-fd8b-44a4-93d5-d3677a6b8551 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.632697] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 0943cfd5-33fb-4b02-9e4d-93f18385bdae/0943cfd5-33fb-4b02-9e4d-93f18385bdae.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.634282] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02ef99a4-5b9b-4241-8e67-490287038678 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.648509] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 909.648509] env[68569]: value = 
"task-3167205" [ 909.648509] env[68569]: _type = "Task" [ 909.648509] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.653605] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 909.653605] env[68569]: value = "task-3167206" [ 909.653605] env[68569]: _type = "Task" [ 909.653605] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.656789] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167205, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.665139] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167206, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.667779] env[68569]: DEBUG oslo_concurrency.lockutils [req-ceffdbcc-3bad-4030-91ed-301c07aee37c req-88ea788f-42c1-427d-837b-31754578679c service nova] Releasing lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.775525] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 910.002219] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52352124-37a9-6103-208f-f09ef3aa7795, 'name': SearchDatastore_Task, 'duration_secs': 0.010837} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.004628] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.004863] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.005116] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.005405] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.005581] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.006713] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-665b9d70-4a5d-4cff-ad94-dcd2bb1093de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.014141] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.014141] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.015024] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c764b16d-b235-4ee1-981e-ec9d768ce050 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.021991] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 910.021991] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525fb38b-a23f-4c8c-f309-008a50b7ec66" [ 910.021991] env[68569]: _type = "Task" [ 910.021991] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.025350] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a29854f9-0096-4b01-9350-bfddee84e2c2] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 910.031145] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525fb38b-a23f-4c8c-f309-008a50b7ec66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.123141] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa68af3-faef-47d2-9774-ecceac96d6b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.131535] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4da0314-d1d5-46da-b914-4012d8fa8fcf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.167608] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ba4328-d860-4e76-a8f3-332d92fe2b76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.174642] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167205, 'name': PowerOffVM_Task, 'duration_secs': 0.120763} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.179175] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.179845] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.180175] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167206, 'name': ReconfigVM_Task, 'duration_secs': 0.317353} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.180847] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a53ad32-df74-43c2-bfab-32a85691f404 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.184396] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0898a293-5702-46ed-9e9a-a8aa226c00c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.187929] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 0943cfd5-33fb-4b02-9e4d-93f18385bdae/0943cfd5-33fb-4b02-9e4d-93f18385bdae.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.188507] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cccd2d25-f966-46c8-96bf-58c82f83c4f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.194496] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.202881] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55da3afb-6269-4393-ab40-94a7c82b382f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.204598] env[68569]: DEBUG nova.compute.provider_tree [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.205967] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 910.205967] env[68569]: value = "task-3167207" [ 910.205967] env[68569]: _type = "Task" [ 910.205967] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.214618] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167207, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.232980] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.233430] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.233678] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Deleting the datastore file [datastore2] 559a1eff-8892-4dda-a540-4a053ae0ef2b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.233971] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d8a0484-f5bd-4e1e-aee1-cdf9caeaff20 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.240600] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 910.240600] env[68569]: value = "task-3167209" [ 910.240600] env[68569]: _type = "Task" [ 910.240600] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.249643] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167209, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.529108] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: f6990fa5-e2c7-4b15-8e40-02cbbd7e75fc] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 910.534567] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525fb38b-a23f-4c8c-f309-008a50b7ec66, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.535322] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15cb3875-e42c-4841-a0ee-a3ab517954b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.541722] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 910.541722] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f1fab2-08bd-27a9-cd62-8d96e31f52ca" [ 910.541722] env[68569]: _type = "Task" [ 910.541722] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.549338] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f1fab2-08bd-27a9-cd62-8d96e31f52ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.707573] env[68569]: DEBUG nova.scheduler.client.report [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.720480] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167207, 'name': Rename_Task, 'duration_secs': 0.135518} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.720752] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.720993] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d9b8d69-c0aa-4c76-b8d6-b45decb33490 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.727992] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 910.727992] env[68569]: value = "task-3167210" [ 910.727992] env[68569]: _type = "Task" [ 910.727992] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.736559] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167210, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.750317] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102337} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.750604] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.750799] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.751010] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.788346] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 910.812709] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 910.812950] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.813119] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.813313] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.813458] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.813599] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 910.813844] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 910.814018] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 910.814190] env[68569]: DEBUG nova.virt.hardware [None 
req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 910.814350] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 910.814519] env[68569]: DEBUG nova.virt.hardware [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 910.815427] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088c2241-b348-40b8-a621-4e137a17e946 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.823486] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb01d5f-ec6e-46ca-87f5-a9d7f598e169 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.027429] env[68569]: DEBUG nova.compute.manager [req-094e1735-f832-4469-895e-eb6da7b38a68 req-34aaa3f4-91ed-4090-888f-353695e08051 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Received event network-vif-plugged-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 911.027683] env[68569]: DEBUG oslo_concurrency.lockutils [req-094e1735-f832-4469-895e-eb6da7b38a68 req-34aaa3f4-91ed-4090-888f-353695e08051 service nova] Acquiring lock "b91304c7-e74d-402b-865b-150e0057c895-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.027977] env[68569]: DEBUG oslo_concurrency.lockutils [req-094e1735-f832-4469-895e-eb6da7b38a68 req-34aaa3f4-91ed-4090-888f-353695e08051 service nova] Lock "b91304c7-e74d-402b-865b-150e0057c895-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.028227] env[68569]: DEBUG oslo_concurrency.lockutils [req-094e1735-f832-4469-895e-eb6da7b38a68 req-34aaa3f4-91ed-4090-888f-353695e08051 service nova] Lock "b91304c7-e74d-402b-865b-150e0057c895-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.028450] env[68569]: DEBUG nova.compute.manager [req-094e1735-f832-4469-895e-eb6da7b38a68 req-34aaa3f4-91ed-4090-888f-353695e08051 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] No waiting events found dispatching network-vif-plugged-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.028671] env[68569]: WARNING nova.compute.manager [req-094e1735-f832-4469-895e-eb6da7b38a68 
req-34aaa3f4-91ed-4090-888f-353695e08051 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Received unexpected event network-vif-plugged-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f for instance with vm_state building and task_state spawning. [ 911.031677] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: b2e6de60-b4e5-4030-bca7-355d17fec06d] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 911.052648] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f1fab2-08bd-27a9-cd62-8d96e31f52ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010486} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.052648] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.052648] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd/367f4fe5-ffef-45f3-b00e-a5cf0418d5cd.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.052974] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eec5fbe8-7d4c-401a-bfe5-e54dcf09c719 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.060589] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 911.060589] env[68569]: value = "task-3167211" [ 911.060589] env[68569]: _type = "Task" [ 911.060589] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.068400] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167211, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.134058] env[68569]: DEBUG nova.network.neutron [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Successfully updated port: 5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.217093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.217093] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.220647] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.138s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.220647] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.222363] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.698s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.223843] env[68569]: INFO nova.compute.claims [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.239246] env[68569]: DEBUG oslo_vmware.api [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167210, 'name': PowerOnVM_Task, 'duration_secs': 0.465354} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.239612] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.239612] env[68569]: INFO nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Took 7.32 seconds to spawn the instance on the hypervisor. [ 911.240747] env[68569]: DEBUG nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 911.240747] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a69d496-0792-4014-80bd-41e502f1c7e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.251860] env[68569]: INFO nova.scheduler.client.report [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Deleted allocations for instance a63b06a1-c24e-4013-a1f4-b227732a1e05 [ 911.535331] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 303f41c0-7a19-48b2-a072-4f138f6f8156] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 911.571331] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497773} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.571522] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd/367f4fe5-ffef-45f3-b00e-a5cf0418d5cd.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 911.571724] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 911.571957] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-704d5a96-fd6f-42ad-994c-2fe3c66f721a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.577468] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 911.577468] env[68569]: value = "task-3167212" [ 911.577468] env[68569]: _type = "Task" [ 911.577468] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.584965] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167212, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.636593] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.636789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.636878] env[68569]: DEBUG nova.network.neutron [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.728656] env[68569]: DEBUG nova.compute.utils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.732382] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.732578] env[68569]: DEBUG nova.network.neutron [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.758995] env[68569]: INFO nova.compute.manager [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Took 58.18 seconds to build instance. 
[ 911.764060] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9aa2f88c-d09a-4a9e-bc85-5463b5988a98 tempest-ServerMetadataTestJSON-269589289 tempest-ServerMetadataTestJSON-269589289-project-member] Lock "a63b06a1-c24e-4013-a1f4-b227732a1e05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.423s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.782789] env[68569]: DEBUG nova.policy [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715937edaba643339195f77bb00fe05d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '062b8ddb5f0d46d08425a66db32471be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.788044] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.788271] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.788426] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.788599] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.788739] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.788904] 
env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.789138] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.789323] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.789500] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.789659] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.789830] env[68569]: DEBUG nova.virt.hardware [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.790669] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97333ec0-572c-4285-9766-7e41438adfa0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.799144] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b48831-3d7f-46a7-b37a-fc041437c9a3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.812831] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.818633] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.818911] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.819158] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94350644-cda5-411a-9f74-a55d5b3b93ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.836119] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.836119] env[68569]: value = "task-3167213" [ 911.836119] env[68569]: _type = "Task" [ 911.836119] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.845196] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167213, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.038915] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 239c1217-ba8e-474a-b02c-7d85e3ac92f4] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 912.088234] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167212, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060352} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.088582] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 912.089433] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d1aae5-d262-4dfa-9315-74cf9477bcba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.113945] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd/367f4fe5-ffef-45f3-b00e-a5cf0418d5cd.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.115029] env[68569]: DEBUG nova.network.neutron [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Successfully created port: 67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.117250] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bdf3b50-e906-447f-97d1-1f98be2ca593 
{{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.137410] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 912.137410] env[68569]: value = "task-3167214" [ 912.137410] env[68569]: _type = "Task" [ 912.137410] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.148581] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167214, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.184954] env[68569]: DEBUG nova.network.neutron [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.233508] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.268935] env[68569]: DEBUG oslo_concurrency.lockutils [None req-273e97bf-12a8-4ab6-9159-89ebe97048ed tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.788s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.350974] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167213, 'name': CreateVM_Task, 'duration_secs': 0.350077} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.351482] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.352621] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.352621] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.352621] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.352621] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b69b11bc-93e1-43dc-ad49-ec6c7891223d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.356965] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 912.356965] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52464c33-de91-39dc-67b1-61505d277314" [ 912.356965] env[68569]: _type = "Task" [ 912.356965] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.369457] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52464c33-de91-39dc-67b1-61505d277314, 'name': SearchDatastore_Task, 'duration_secs': 0.009627} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.371017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.371017] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.371017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.371017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.371317] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.371317] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d0ab316-dbee-442a-9150-f29c903775aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.381589] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.381887] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.382841] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a626448-a523-48e1-8be5-0d59f1152288 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.389341] env[68569]: DEBUG nova.network.neutron [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updating instance_info_cache with network_info: [{"id": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "address": "fa:16:3e:58:35:5f", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c9a2dbd-a7", "ovs_interfaceid": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.398621] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 912.398621] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ab6f98-f017-6104-ea37-114bd3f74331" [ 912.398621] env[68569]: _type = "Task" [ 912.398621] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.406494] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ab6f98-f017-6104-ea37-114bd3f74331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.541819] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 9bb06e82-cc5c-4673-b1f6-aae87568aa9c] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 912.647759] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167214, 'name': ReconfigVM_Task, 'duration_secs': 0.30021} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.648967] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd/367f4fe5-ffef-45f3-b00e-a5cf0418d5cd.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.650370] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb17107-fabe-4568-bc4e-59d8917aa0ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.653100] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ae73e38-7bc7-4ec0-b893-4a47150d34dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.659413] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e62045-5c7a-417f-90b8-3690b7ae1af8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.663615] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 912.663615] env[68569]: value = "task-3167215" [ 912.663615] env[68569]: _type = "Task" [ 912.663615] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.694804] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b115d8-cabe-4494-80d9-224fc8ca64ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.700879] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167215, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.706040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cdff25-0364-4fcd-81c1-28b60f68b0d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.720230] env[68569]: DEBUG nova.compute.provider_tree [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.754021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.754021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.754021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.754021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.754326] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.758020] env[68569]: INFO nova.compute.manager [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Terminating instance [ 912.891242] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e 
tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.892030] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Instance network_info: |[{"id": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "address": "fa:16:3e:58:35:5f", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c9a2dbd-a7", "ovs_interfaceid": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 912.892314] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:35:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.904955] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 912.905818] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b91304c7-e74d-402b-865b-150e0057c895] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 912.910731] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55b5cf9f-a7dd-4e0e-8e09-d3b2084dee16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.943813] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ab6f98-f017-6104-ea37-114bd3f74331, 'name': SearchDatastore_Task, 'duration_secs': 0.018879} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.945632] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.945632] env[68569]: value = "task-3167216" [ 912.945632] env[68569]: _type = "Task" [ 912.945632] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.945919] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46dc25fd-12c3-4a4a-9a03-a3d1a01d1b9d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.957985] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 912.957985] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bd32a6-0f06-bf6d-3404-1a686f539855" [ 912.957985] env[68569]: _type = "Task" [ 912.957985] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.958264] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167216, 'name': CreateVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.965352] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bd32a6-0f06-bf6d-3404-1a686f539855, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.046122] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c9264123-ab19-40d5-959a-791b8966d2f6] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 913.067307] env[68569]: DEBUG nova.compute.manager [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Received event network-changed-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 913.068628] env[68569]: DEBUG nova.compute.manager [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Refreshing instance network info cache due to event network-changed-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 913.068918] env[68569]: DEBUG oslo_concurrency.lockutils [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] Acquiring lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.069120] env[68569]: DEBUG oslo_concurrency.lockutils [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] Acquired lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.069333] env[68569]: DEBUG nova.network.neutron [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Refreshing network info cache for port 5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.174757] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167215, 'name': Rename_Task, 'duration_secs': 0.135901} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.175179] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.175518] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-faffa688-84dd-4423-9b90-fd8c93103707 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.182251] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 913.182251] env[68569]: value = "task-3167217" [ 913.182251] env[68569]: _type = "Task" [ 913.182251] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.191773] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.224040] env[68569]: DEBUG nova.scheduler.client.report [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.248174] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.259235] env[68569]: DEBUG nova.compute.manager [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 913.259539] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.260750] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f297a866-9321-426c-ad89-124602f76a8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.268963] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.269470] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cdb6c0e-286c-481d-94d2-945da8aba1df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.275484] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.275708] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.275859] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.276053] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.276204] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc 
tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.276351] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.276569] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.276726] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.276892] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.277062] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.277239] env[68569]: DEBUG nova.virt.hardware [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.278268] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cd375b-3411-4743-b352-95fc5473e86c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.283198] env[68569]: DEBUG oslo_vmware.api [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 913.283198] env[68569]: value = "task-3167218" [ 913.283198] env[68569]: _type = "Task" [ 913.283198] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.289272] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92275d5f-82b9-4019-9580-18563662e8b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.295610] env[68569]: DEBUG oslo_vmware.api [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167218, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.458013] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167216, 'name': CreateVM_Task, 'duration_secs': 0.335325} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.458176] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b91304c7-e74d-402b-865b-150e0057c895] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 913.458833] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.459091] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.459511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 913.459632] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af3de2cd-9a8f-4781-8574-777cf1a0530f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.467366] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 913.467366] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a53526-d1d5-7680-f7a2-6ea7536b11ed" [ 913.467366] env[68569]: _type = "Task" [ 913.467366] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.471173] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bd32a6-0f06-bf6d-3404-1a686f539855, 'name': SearchDatastore_Task, 'duration_secs': 0.012042} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.476936] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.476936] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.476936] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f8eefe0-b4c9-406d-abd1-981a78dfc7c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.481973] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a53526-d1d5-7680-f7a2-6ea7536b11ed, 'name': SearchDatastore_Task, 'duration_secs': 0.010227} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.484352] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.484580] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.484810] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.484955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.485144] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 913.485539] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 913.485539] env[68569]: value = "task-3167219" [ 913.485539] env[68569]: _type = "Task" [ 913.485539] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.485751] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9bd573d-6d70-4cdb-9333-d05dd7b98d9d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.495883] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167219, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.497682] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 913.498139] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 913.498854] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ce8563f-882b-4c9b-b14e-f6cfd29b3b40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.504894] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 913.504894] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52865c86-d7b0-4485-4ea6-24dde30577d0" [ 913.504894] env[68569]: _type = "Task" [ 913.504894] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.513299] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52865c86-d7b0-4485-4ea6-24dde30577d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.549672] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 40b768c1-9007-4f78-a90f-61b2ac64553f] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 913.697622] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167217, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.729427] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.729958] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 913.733122] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.649s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.734761] env[68569]: INFO nova.compute.claims [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.793109] env[68569]: DEBUG oslo_vmware.api [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167218, 'name': PowerOffVM_Task, 'duration_secs': 0.179261} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.793374] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.793575] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.793831] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73c20197-6ead-4d2f-a977-f0ab04f6637a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.830670] env[68569]: DEBUG nova.network.neutron [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Successfully updated port: 67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.864021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.864021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.864021] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 
tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Deleting the datastore file [datastore2] 0943cfd5-33fb-4b02-9e4d-93f18385bdae {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.864021] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70908b24-539a-4b25-a006-45aad029b8fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.869369] env[68569]: DEBUG oslo_vmware.api [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for the task: (returnval){ [ 913.869369] env[68569]: value = "task-3167221" [ 913.869369] env[68569]: _type = "Task" [ 913.869369] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.884024] env[68569]: DEBUG oslo_vmware.api [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.934396] env[68569]: DEBUG nova.network.neutron [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updated VIF entry in instance network info cache for port 5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.934763] env[68569]: DEBUG nova.network.neutron [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updating instance_info_cache with network_info: [{"id": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "address": "fa:16:3e:58:35:5f", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c9a2dbd-a7", "ovs_interfaceid": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.999492] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167219, 'name': 
CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.016525] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52865c86-d7b0-4485-4ea6-24dde30577d0, 'name': SearchDatastore_Task, 'duration_secs': 0.009729} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.017451] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5abb86e-85fe-41c2-b7dc-62ace03c6e32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.025402] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 914.025402] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d2b015-6457-ab42-a503-29d1f9191f62" [ 914.025402] env[68569]: _type = "Task" [ 914.025402] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.034986] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d2b015-6457-ab42-a503-29d1f9191f62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.056774] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c56e4282-b1ca-42f5-b346-692779475df0] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 914.197525] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167217, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.236439] env[68569]: DEBUG nova.compute.utils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 914.236439] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 914.236439] env[68569]: DEBUG nova.network.neutron [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.278020] env[68569]: DEBUG nova.policy [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afebab35cda9438781e2b466ce586405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 914.337897] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "refresh_cache-3551627b-9c90-43ea-bae7-d186eaa53c6b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.338098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "refresh_cache-3551627b-9c90-43ea-bae7-d186eaa53c6b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.338261] env[68569]: DEBUG nova.network.neutron [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.383351] env[68569]: DEBUG oslo_vmware.api [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Task: {'id': task-3167221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294559} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.383587] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.383587] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.383896] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.383896] env[68569]: INFO nova.compute.manager [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Took 1.12 seconds to destroy the instance on the hypervisor. [ 914.384290] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.384381] env[68569]: DEBUG nova.compute.manager [-] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 914.384445] env[68569]: DEBUG nova.network.neutron [-] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.437517] env[68569]: DEBUG oslo_concurrency.lockutils [req-5f61705e-9093-4d0a-9944-08676d66a06a req-d6759ecd-1770-4b4a-9ada-1025fed8b7f4 service nova] Releasing lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.500242] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.716666} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.500289] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 914.500684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 914.500791] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e05ca47-d7d0-40f9-8dce-1249a4672d40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.507925] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 914.507925] env[68569]: value = "task-3167222" [ 914.507925] env[68569]: _type = "Task" [ 914.507925] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.518380] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.538447] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d2b015-6457-ab42-a503-29d1f9191f62, 'name': SearchDatastore_Task, 'duration_secs': 0.018974} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.538447] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.538447] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] b91304c7-e74d-402b-865b-150e0057c895/b91304c7-e74d-402b-865b-150e0057c895.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 914.538447] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e4fe486-bcb0-417b-aed6-aa9989f44111 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.543580] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 914.543580] env[68569]: value = "task-3167223" [ 914.543580] env[68569]: _type = "Task" [ 914.543580] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.554255] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.563577] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 39a84212-2e52-4dba-b00c-5689564deaf4] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 914.691304] env[68569]: DEBUG nova.network.neutron [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Successfully created port: 3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.697472] env[68569]: DEBUG oslo_vmware.api [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167217, 'name': PowerOnVM_Task, 'duration_secs': 1.172886} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.699211] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.699822] env[68569]: INFO nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Took 8.34 seconds to spawn the instance on the hypervisor. [ 914.700872] env[68569]: DEBUG nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 914.702133] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5dd8ae-fa49-4a17-828c-e0a8cf2163a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.740949] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 914.875536] env[68569]: DEBUG nova.network.neutron [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.017208] env[68569]: DEBUG nova.network.neutron [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Updating instance_info_cache with network_info: [{"id": "67d7d837-0e5c-4bba-969a-779af4075541", "address": "fa:16:3e:4e:e5:8f", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d7d837-0e", "ovs_interfaceid": "67d7d837-0e5c-4bba-969a-779af4075541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.021376] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08808} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.021852] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.022613] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500ff7e1-3386-4307-bbc2-949e95c3e18d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.045171] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.048181] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06b67005-f710-4f4d-a3c8-d50d4206af51 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.067303] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: ad207187-634f-4e7f-9809-eb3f742ddeec] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 915.071941] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.075767] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 915.075767] env[68569]: value = "task-3167224" [ 915.075767] env[68569]: _type = "Task" [ 915.075767] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.083746] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167224, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.097804] env[68569]: DEBUG nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Received event network-vif-plugged-67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 915.098016] env[68569]: DEBUG oslo_concurrency.lockutils [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] Acquiring lock "3551627b-9c90-43ea-bae7-d186eaa53c6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.098224] env[68569]: DEBUG oslo_concurrency.lockutils [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.098387] env[68569]: DEBUG oslo_concurrency.lockutils [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.098558] env[68569]: DEBUG nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] No waiting events found dispatching network-vif-plugged-67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.098716] env[68569]: WARNING nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Received unexpected event network-vif-plugged-67d7d837-0e5c-4bba-969a-779af4075541 for instance with vm_state building and task_state spawning. [ 915.098874] env[68569]: DEBUG nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Received event network-changed-67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 915.099077] env[68569]: DEBUG nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Refreshing instance network info cache due to event network-changed-67d7d837-0e5c-4bba-969a-779af4075541. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 915.099262] env[68569]: DEBUG oslo_concurrency.lockutils [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] Acquiring lock "refresh_cache-3551627b-9c90-43ea-bae7-d186eaa53c6b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.112431] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd849e8c-f544-485e-92df-fdc807086668 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.119220] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1e83b5-305d-4614-8aeb-6bf77f320bec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.149795] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1a3c16-4d55-42fd-b69e-b34225bb1010 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.156678] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28739994-7d11-45fb-b6b7-74f62d459d43 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.170575] env[68569]: DEBUG nova.compute.provider_tree [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.202123] env[68569]: DEBUG nova.network.neutron [-] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.226517] env[68569]: INFO nova.compute.manager [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Took 59.56 seconds to build instance. 
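
The lock bookkeeping that dominates this trace, the plain Acquiring/Acquired/Releasing lines around the image-cache and refresh_cache paths (lockutils.py:313/316/334) and the `acquired by ... :: waited N.NNNs` / `"released" by ... :: held N.NNNs` pairs around `"compute_resources"` and the per-instance `-events` locks (lockutils.py:405/410/424), is emitted by oslo.concurrency's lockutils helpers, not by Nova's own code. A minimal sketch of the two usage patterns follows; it assumes only that oslo.concurrency is importable, the lock names `demo-instance-events` and `demo-refresh_cache` and the worker functions are invented for illustration, and with DEBUG logging enabled the library should emit acquire/release lines of the same shape as above.

    import logging
    import threading
    import time

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)


    # Decorator form: lockutils' internal wrapper ("inner" in lockutils.py)
    # records how long the caller waited for the lock and how long it was held,
    # which is what the "waited N.NNNs" / "held N.NNNs" pairs above report.
    @lockutils.synchronized("demo-instance-events")
    def pop_event(worker_id):
        time.sleep(0.2)  # pretend to mutate per-instance event state
        return worker_id


    def refresh_cache():
        # Context-manager form: produces the plain Acquiring/Acquired/Releasing
        # lock lines around a critical section, as seen for the image-cache and
        # refresh_cache locks in the trace.
        with lockutils.lock("demo-refresh_cache"):
            time.sleep(0.1)


    if __name__ == "__main__":
        workers = [threading.Thread(target=pop_event, args=(i,)) for i in range(3)]
        for t in workers:
            t.start()
        for t in workers:
            t.join()
        refresh_cache()

Both forms default to in-process locks; the long waits logged above (tens of seconds on `"compute_resources"`) are simply threads queueing on the same named lock while the resource tracker works.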
[ 915.522645] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "refresh_cache-3551627b-9c90-43ea-bae7-d186eaa53c6b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.523147] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Instance network_info: |[{"id": "67d7d837-0e5c-4bba-969a-779af4075541", "address": "fa:16:3e:4e:e5:8f", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d7d837-0e", "ovs_interfaceid": "67d7d837-0e5c-4bba-969a-779af4075541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 915.523581] env[68569]: DEBUG oslo_concurrency.lockutils [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] Acquired lock "refresh_cache-3551627b-9c90-43ea-bae7-d186eaa53c6b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.523811] env[68569]: DEBUG nova.network.neutron [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Refreshing network info cache for port 67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.525220] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:e5:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f52a458-d157-48a3-b4e2-b8cc0779afe2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67d7d837-0e5c-4bba-969a-779af4075541', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.533067] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 
tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.534202] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.534451] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72cdcb81-70df-4636-a9fd-4d449dd1d355 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.561404] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167223, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.562864] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.562864] env[68569]: value = "task-3167225" [ 915.562864] env[68569]: _type = "Task" [ 915.562864] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.572586] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167225, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.574209] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 50b9775c-ddbd-4e8f-a2b8-b08c3028fc28] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 915.584641] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167224, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.673963] env[68569]: DEBUG nova.scheduler.client.report [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.704912] env[68569]: INFO nova.compute.manager [-] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Took 1.32 seconds to deallocate network for instance. 
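
The recurring `Task: {'id': task-..., 'name': ...Task} progress is N%` and `... completed successfully` lines are the visible side of oslo.vmware's task polling: after invoking a vCenter method that returns a task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, ...), the driver blocks in `wait_for_task`, which re-reads the task's state on a fixed interval until it succeeds or fails. The loop below is a self-contained, stdlib-only sketch of that poll-until-done pattern; `FakeTask`, its progress steps, and the 0.5 s interval are invented for illustration, and this is not oslo.vmware's implementation.

    import time


    class FakeTask:
        """Illustrative stand-in for a vCenter task that can only be polled."""

        def __init__(self):
            self._progress = iter([0, 25, 77, 100])  # echoes percentages seen above

        def poll(self):
            progress = next(self._progress)
            state = "success" if progress == 100 else "running"
            return {"progress": progress, "state": state}


    def wait_for_task(task, interval=0.5):
        """Poll until the task reaches a terminal state, reporting progress."""
        while True:
            info = task.poll()
            print("Task progress is %d%%." % info["progress"])
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)


    if __name__ == "__main__":
        wait_for_task(FakeTask())

A similar looping-call helper from oslo.service drives the nearby `Waiting for function ... to return` lines, e.g. around `_deallocate_network_with_retries` and `vm_util.create_vm`.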
[ 915.727623] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aca4ccb2-df3e-4a1d-a58c-eb1b987c9773 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.853s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.752433] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 915.778635] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 915.778960] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.779070] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 915.779239] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.779384] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 915.779532] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 915.779741] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 915.779898] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 915.780073] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 915.780239] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 915.780408] env[68569]: DEBUG nova.virt.hardware [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 915.781280] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d820237-ff90-47fc-9247-85c920cd6fb2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.789615] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1a2a29-1c9d-4fa2-a869-128ad40eb076 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.010085] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] Acquiring lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.010152] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] Acquired lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.010295] env[68569]: DEBUG nova.network.neutron [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.061201] env[68569]: DEBUG oslo_vmware.api [None 
req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167223, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.092965} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.061367] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] b91304c7-e74d-402b-865b-150e0057c895/b91304c7-e74d-402b-865b-150e0057c895.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.061577] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.061835] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1505c8fb-3794-4df3-a09a-7a4ecba73037 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.073669] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167225, 'name': CreateVM_Task, 'duration_secs': 0.346453} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.074742] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.075087] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 916.075087] env[68569]: value = "task-3167226" [ 916.075087] env[68569]: _type = "Task" [ 916.075087] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.075732] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.076090] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.076238] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 916.076535] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-404b300f-89fd-4d74-82cb-93ce6a3f113d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.084625] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: e77cc179-1f3d-4095-a491-48df7f79bdb9] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 916.090388] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 916.090388] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b08e-1bfb-a285-9b30-d4e600a22e3c" [ 916.090388] env[68569]: _type = "Task" [ 916.090388] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.096069] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.096290] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167224, 'name': ReconfigVM_Task, 'duration_secs': 0.813618} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.099735] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 559a1eff-8892-4dda-a540-4a053ae0ef2b/559a1eff-8892-4dda-a540-4a053ae0ef2b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.100398] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2cfb7add-217c-4e26-ab3b-6732b63f7dbb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.106403] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b08e-1bfb-a285-9b30-d4e600a22e3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.107717] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 916.107717] env[68569]: value = "task-3167227" [ 916.107717] env[68569]: _type = "Task" [ 916.107717] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.117111] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167227, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.180525] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.180525] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.184839] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.251s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.185241] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.189307] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.758s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.189653] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.192393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.769s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.195185] env[68569]: INFO nova.compute.claims [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.211908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.236757] env[68569]: INFO nova.scheduler.client.report [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Deleted allocations for instance fb97d2dd-d42a-42e8-9a36-5c913a58b891 [ 916.238732] env[68569]: INFO nova.scheduler.client.report [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Deleted allocations for instance dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53 [ 916.273618] env[68569]: 
DEBUG nova.compute.manager [req-37144931-7a64-463c-941a-043ae84ca4fe req-3cab52d4-acaf-4a9c-9993-ef6e1c90e22b service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Received event network-vif-plugged-3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 916.273887] env[68569]: DEBUG oslo_concurrency.lockutils [req-37144931-7a64-463c-941a-043ae84ca4fe req-3cab52d4-acaf-4a9c-9993-ef6e1c90e22b service nova] Acquiring lock "c0211ed8-5995-48f4-b339-99bd4c93254c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.274349] env[68569]: DEBUG oslo_concurrency.lockutils [req-37144931-7a64-463c-941a-043ae84ca4fe req-3cab52d4-acaf-4a9c-9993-ef6e1c90e22b service nova] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.274533] env[68569]: DEBUG oslo_concurrency.lockutils [req-37144931-7a64-463c-941a-043ae84ca4fe req-3cab52d4-acaf-4a9c-9993-ef6e1c90e22b service nova] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.274700] env[68569]: DEBUG nova.compute.manager [req-37144931-7a64-463c-941a-043ae84ca4fe req-3cab52d4-acaf-4a9c-9993-ef6e1c90e22b service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] No waiting events found dispatching network-vif-plugged-3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 916.274868] env[68569]: WARNING nova.compute.manager [req-37144931-7a64-463c-941a-043ae84ca4fe req-3cab52d4-acaf-4a9c-9993-ef6e1c90e22b service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Received unexpected event network-vif-plugged-3df34aa7-452f-49b6-9a1a-0e902948e0c6 for instance with vm_state building and task_state spawning. [ 916.326310] env[68569]: DEBUG nova.network.neutron [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Updated VIF entry in instance network info cache for port 67d7d837-0e5c-4bba-969a-779af4075541. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 916.326310] env[68569]: DEBUG nova.network.neutron [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Updating instance_info_cache with network_info: [{"id": "67d7d837-0e5c-4bba-969a-779af4075541", "address": "fa:16:3e:4e:e5:8f", "network": {"id": "7738c508-0306-4079-a892-0554c562ce54", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1732251555-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "062b8ddb5f0d46d08425a66db32471be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d7d837-0e", "ovs_interfaceid": "67d7d837-0e5c-4bba-969a-779af4075541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.588134] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 26625edb-06ca-48cc-aaf1-3c55a6ea942b] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 916.590352] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286049} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.590677] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.591612] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1449300f-f002-45d4-b7ad-06a966b6c467 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.614406] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] b91304c7-e74d-402b-865b-150e0057c895/b91304c7-e74d-402b-865b-150e0057c895.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.621812] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0508bf8-00ac-40fb-86fd-b125e8976320 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.645999] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b08e-1bfb-a285-9b30-d4e600a22e3c, 'name': SearchDatastore_Task, 'duration_secs': 0.028307} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.649714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.649950] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.650223] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.650707] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.650903] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.651755] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167227, 'name': Rename_Task, 'duration_secs': 0.390927} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.651755] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 916.651755] env[68569]: value = "task-3167228" [ 916.651755] env[68569]: _type = "Task" [ 916.651755] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.652045] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-167e8af0-59cc-4b9b-86bb-21f480d1e376 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.653612] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 916.653880] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12fc6570-4e21-413c-b5fe-733029971f86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.666437] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 916.666437] env[68569]: value = "task-3167229" [ 916.666437] env[68569]: _type = "Task" [ 916.666437] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.679075] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.680114] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.680316] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.680968] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-356cc73d-7509-41c2-ae4e-db31b2724053 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.687947] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 916.687947] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520493b3-ef12-eb6a-1366-8d91ed83a9fc" [ 916.687947] env[68569]: _type = "Task" [ 916.687947] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.696367] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520493b3-ef12-eb6a-1366-8d91ed83a9fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.704810] env[68569]: DEBUG nova.compute.utils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.708940] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.709143] env[68569]: DEBUG nova.network.neutron [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.755303] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c254006c-6f9c-4538-83ee-68a4c7446fdb tempest-VolumesAdminNegativeTest-2040182490 tempest-VolumesAdminNegativeTest-2040182490-project-member] Lock "fb97d2dd-d42a-42e8-9a36-5c913a58b891" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.208s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.757012] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fbdb1f8-9f67-4103-9758-7d87a312289d tempest-ServerPasswordTestJSON-111080757 tempest-ServerPasswordTestJSON-111080757-project-member] Lock "dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.277s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.766069] env[68569]: DEBUG nova.policy [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a84e1c69c5454ffcab15cea91a4c1dfb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a77ea99dbb445396a7ac5888135321', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 916.819620] env[68569]: DEBUG nova.network.neutron [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Updating instance_info_cache with network_info: 
[{"id": "948d6af6-c562-4b44-b852-89a32cf79dad", "address": "fa:16:3e:9a:c3:6c", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948d6af6-c5", "ovs_interfaceid": "948d6af6-c562-4b44-b852-89a32cf79dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.828060] env[68569]: DEBUG oslo_concurrency.lockutils [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] Releasing lock "refresh_cache-3551627b-9c90-43ea-bae7-d186eaa53c6b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.828353] env[68569]: DEBUG nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Received event network-vif-deleted-6454ea52-76d3-478e-b299-b8484a3f5300 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 916.828576] env[68569]: INFO nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Neutron deleted interface 6454ea52-76d3-478e-b299-b8484a3f5300; detaching it from the instance and deleting it from the info cache [ 916.828774] env[68569]: DEBUG nova.network.neutron [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.003125] env[68569]: DEBUG nova.network.neutron [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Successfully updated port: 3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 917.045565] env[68569]: DEBUG nova.compute.manager [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Received event network-changed-3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 917.045786] env[68569]: DEBUG nova.compute.manager [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] [instance: 
c0211ed8-5995-48f4-b339-99bd4c93254c] Refreshing instance network info cache due to event network-changed-3df34aa7-452f-49b6-9a1a-0e902948e0c6. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 917.046296] env[68569]: DEBUG oslo_concurrency.lockutils [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] Acquiring lock "refresh_cache-c0211ed8-5995-48f4-b339-99bd4c93254c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.046296] env[68569]: DEBUG oslo_concurrency.lockutils [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] Acquired lock "refresh_cache-c0211ed8-5995-48f4-b339-99bd4c93254c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.046791] env[68569]: DEBUG nova.network.neutron [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Refreshing network info cache for port 3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.091990] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 1c8dfb47-df19-4101-8d4e-30889d71d7da] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 917.152826] env[68569]: DEBUG nova.network.neutron [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Successfully created port: d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.165731] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167228, 'name': ReconfigVM_Task, 'duration_secs': 0.335277} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.166042] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Reconfigured VM instance instance-00000047 to attach disk [datastore2] b91304c7-e74d-402b-865b-150e0057c895/b91304c7-e74d-402b-865b-150e0057c895.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.166671] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2080a55-0c9a-4ff5-a7ae-6811a6e30615 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.183575] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167229, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.184261] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 917.184261] env[68569]: value = "task-3167230" [ 917.184261] env[68569]: _type = "Task" [ 917.184261] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.196204] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167230, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.201601] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520493b3-ef12-eb6a-1366-8d91ed83a9fc, 'name': SearchDatastore_Task, 'duration_secs': 0.009972} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.202396] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d69fbb8-4ef9-46b5-b59e-8f9380916491 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.208123] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 917.208123] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5231047f-329a-ccdf-a3a4-ab26e59affb4" [ 917.208123] env[68569]: _type = "Task" [ 917.208123] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.212341] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.225671] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5231047f-329a-ccdf-a3a4-ab26e59affb4, 'name': SearchDatastore_Task, 'duration_secs': 0.010522} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.225977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.226845] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 3551627b-9c90-43ea-bae7-d186eaa53c6b/3551627b-9c90-43ea-bae7-d186eaa53c6b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.226845] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89c17c8e-d89b-437a-b8e1-2aee5c5c8dd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.233734] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 917.233734] env[68569]: value = "task-3167231" [ 917.233734] env[68569]: _type = "Task" [ 917.233734] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.242225] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167231, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.322367] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] Releasing lock "refresh_cache-367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.322602] env[68569]: DEBUG nova.compute.manager [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Inject network info {{(pid=68569) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7744}} [ 917.322871] env[68569]: DEBUG nova.compute.manager [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] network_info to inject: |[{"id": "948d6af6-c562-4b44-b852-89a32cf79dad", "address": "fa:16:3e:9a:c3:6c", "network": {"id": "cf9f728e-bea5-495e-8933-c454fb6f87a7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1703203184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8bd8ff748a34e7a83ec0edbfa148aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948d6af6-c5", "ovs_interfaceid": "948d6af6-c562-4b44-b852-89a32cf79dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7745}} [ 917.328133] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Reconfiguring VM instance to set the machine id {{(pid=68569) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 917.328483] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b1251be-e361-4074-8c88-a6da7e409e6f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.343161] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9c99850-29fe-4907-a2b8-f82013f3371b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.354364] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f70432a-a33a-4080-95dd-39d607eb2efb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.368873] env[68569]: DEBUG 
oslo_vmware.api [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] Waiting for the task: (returnval){ [ 917.368873] env[68569]: value = "task-3167232" [ 917.368873] env[68569]: _type = "Task" [ 917.368873] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.381793] env[68569]: DEBUG oslo_vmware.api [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] Task: {'id': task-3167232, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.396182] env[68569]: DEBUG nova.compute.manager [req-18e82af6-fad2-40e3-8f6a-116080239dca req-919fa4f7-ae01-4fe2-946f-74ff5c13326e service nova] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Detach interface failed, port_id=6454ea52-76d3-478e-b299-b8484a3f5300, reason: Instance 0943cfd5-33fb-4b02-9e4d-93f18385bdae could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 917.506278] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-c0211ed8-5995-48f4-b339-99bd4c93254c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.597797] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 98d5c760-6da3-49e3-af47-20a8054971f3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 917.611984] env[68569]: DEBUG nova.network.neutron [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.661186] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f95fb0-f3ed-46ea-9f57-4700e970dc03 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.671077] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb97d76-6d33-4ca5-8269-b16cbc5e5944 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.722017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0127de9-70b5-4193-808f-4700a02b5b2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.725037] env[68569]: DEBUG oslo_vmware.api [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167229, 'name': PowerOnVM_Task, 'duration_secs': 0.648975} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.728763] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 917.729049] env[68569]: DEBUG nova.compute.manager [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 917.731374] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f777408-bfc7-4a6b-900c-44288a7aa68a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.740339] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167230, 'name': Rename_Task, 'duration_secs': 0.141642} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.744194] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.745760] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c3f965-333f-4a1e-9a36-618626c2c883 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.750955] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9f7e793-4bed-4937-b40b-8211d67ab47e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.760818] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167231, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.770959] env[68569]: DEBUG nova.compute.provider_tree [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.772532] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 917.772532] env[68569]: value = "task-3167233" [ 917.772532] env[68569]: _type = "Task" [ 917.772532] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.782726] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167233, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.788599] env[68569]: DEBUG nova.network.neutron [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.886874] env[68569]: DEBUG oslo_vmware.api [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] Task: {'id': task-3167232, 'name': ReconfigVM_Task, 'duration_secs': 0.234681} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.887768] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ac0797-ebe8-4216-bc5a-3e588f5536b0 tempest-ServersAdminTestJSON-796867534 tempest-ServersAdminTestJSON-796867534-project-admin] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Reconfigured VM instance to set the machine id {{(pid=68569) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 918.105116] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 8eb18d79-e164-4e66-83b0-7b40d04c30a8] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 918.231636] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.267548] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.277690] env[68569]: DEBUG nova.scheduler.client.report [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.282908] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167231, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569473} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.287606] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 3551627b-9c90-43ea-bae7-d186eaa53c6b/3551627b-9c90-43ea-bae7-d186eaa53c6b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.288599] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.290543] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.293089] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.293089] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.293089] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.293089] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.293089] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.293269] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.293269] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.293269] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.293269] env[68569]: DEBUG nova.virt.hardware [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.293269] env[68569]: DEBUG nova.virt.hardware [None 
req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.293582] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23335c36-2bc9-4d02-b696-44ea5311afa3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.295454] env[68569]: DEBUG oslo_concurrency.lockutils [req-71b0a561-64d3-4771-938f-8d176ab2c155 req-2a7c6e40-e06d-41ac-b93a-00131c9035db service nova] Releasing lock "refresh_cache-c0211ed8-5995-48f4-b339-99bd4c93254c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.296954] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ecb068-5d53-4c7e-9a18-1832f2584023 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.304198] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-c0211ed8-5995-48f4-b339-99bd4c93254c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.304198] env[68569]: DEBUG nova.network.neutron [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.308947] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167233, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.314421] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 918.314421] env[68569]: value = "task-3167234" [ 918.314421] env[68569]: _type = "Task" [ 918.314421] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.315914] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cda741-f517-4522-a380-625d0313f9be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.339806] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167234, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.446367] env[68569]: INFO nova.compute.manager [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Rebuilding instance [ 918.489642] env[68569]: DEBUG nova.compute.manager [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.490755] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5e47e2-d534-4977-a094-20fa55f826f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.609479] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7129a57f-e639-49ae-96a9-3c1d966034a8] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 918.748569] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "559a1eff-8892-4dda-a540-4a053ae0ef2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.748848] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "559a1eff-8892-4dda-a540-4a053ae0ef2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.749074] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "559a1eff-8892-4dda-a540-4a053ae0ef2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.749260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "559a1eff-8892-4dda-a540-4a053ae0ef2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.749429] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "559a1eff-8892-4dda-a540-4a053ae0ef2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.752404] env[68569]: INFO nova.compute.manager [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Terminating instance [ 918.787391] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.787896] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.794020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.378s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.794020] env[68569]: DEBUG nova.objects.instance [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lazy-loading 'resources' on Instance uuid f84204a9-aeea-498e-9682-298e581b34e3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.800705] env[68569]: DEBUG oslo_vmware.api [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167233, 'name': PowerOnVM_Task, 'duration_secs': 0.620577} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.800939] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.805256] env[68569]: INFO nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Took 8.01 seconds to spawn the instance on the hypervisor. 
[ 918.805456] env[68569]: DEBUG nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.809023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9af0b93-fe3b-488c-bffb-915334337b00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.835057] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167234, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074969} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.835057] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.835057] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549e72bf-9a0e-45ec-9f1f-6cc5884bf1ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.859519] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 3551627b-9c90-43ea-bae7-d186eaa53c6b/3551627b-9c90-43ea-bae7-d186eaa53c6b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.860657] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4ba0146-bcf9-402e-a0aa-b820c29e62fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.879384] env[68569]: DEBUG nova.network.neutron [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.888167] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 918.888167] env[68569]: value = "task-3167235" [ 918.888167] env[68569]: _type = "Task" [ 918.888167] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.896860] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167235, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.076861] env[68569]: DEBUG nova.compute.manager [req-e18b1fba-a590-43f4-9656-b2f0f258af3c req-a2582a0f-f2ba-4623-a0ae-806a6e8f2fdc service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Received event network-vif-plugged-d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 919.077086] env[68569]: DEBUG oslo_concurrency.lockutils [req-e18b1fba-a590-43f4-9656-b2f0f258af3c req-a2582a0f-f2ba-4623-a0ae-806a6e8f2fdc service nova] Acquiring lock "7696390d-a097-4b6d-827d-92f3165a4188-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.077300] env[68569]: DEBUG oslo_concurrency.lockutils [req-e18b1fba-a590-43f4-9656-b2f0f258af3c req-a2582a0f-f2ba-4623-a0ae-806a6e8f2fdc service nova] Lock "7696390d-a097-4b6d-827d-92f3165a4188-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.077466] env[68569]: DEBUG oslo_concurrency.lockutils [req-e18b1fba-a590-43f4-9656-b2f0f258af3c req-a2582a0f-f2ba-4623-a0ae-806a6e8f2fdc service nova] Lock "7696390d-a097-4b6d-827d-92f3165a4188-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.077631] env[68569]: DEBUG nova.compute.manager [req-e18b1fba-a590-43f4-9656-b2f0f258af3c req-a2582a0f-f2ba-4623-a0ae-806a6e8f2fdc service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] No waiting events found dispatching network-vif-plugged-d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.077793] env[68569]: WARNING nova.compute.manager [req-e18b1fba-a590-43f4-9656-b2f0f258af3c req-a2582a0f-f2ba-4623-a0ae-806a6e8f2fdc service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Received unexpected event network-vif-plugged-d99f9970-89b3-4cf5-ae87-3e05a8b03ffa for instance with vm_state building and task_state spawning. 
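The ReconfigVM_Task entries above ("Waiting for the task: (returnval){ value = task-3167235 ... }", "progress is 6%", and the later "completed successfully") show the driver polling a vCenter task until it reaches a terminal state. A hedged, generic sketch of that poll-until-done loop follows; fetch_task_info is a hypothetical stand-in for the vSphere TaskInfo lookup and is not an oslo.vmware API:

    import time

    def wait_for_task(fetch_task_info, interval=0.5):
        # Poll the task until it succeeds or fails, mirroring the
        # "_poll_task ... progress is N%" / "completed successfully" entries.
        while True:
            info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 14}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)

    # Tiny usage example with a fake task that succeeds on the third poll.
    states = iter([{'state': 'running', 'progress': 6},
                   {'state': 'running', 'progress': 14},
                   {'state': 'success', 'progress': 100}])
    wait_for_task(lambda: next(states))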
[ 919.117616] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 925400c4-3b33-4f4a-9f63-3ceec06cf0b7] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 919.220356] env[68569]: DEBUG nova.network.neutron [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Updating instance_info_cache with network_info: [{"id": "3df34aa7-452f-49b6-9a1a-0e902948e0c6", "address": "fa:16:3e:d0:31:6d", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3df34aa7-45", "ovs_interfaceid": "3df34aa7-452f-49b6-9a1a-0e902948e0c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.221558] env[68569]: DEBUG nova.network.neutron [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Successfully updated port: d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.256032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "refresh_cache-559a1eff-8892-4dda-a540-4a053ae0ef2b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.256032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquired lock "refresh_cache-559a1eff-8892-4dda-a540-4a053ae0ef2b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.256213] env[68569]: DEBUG nova.network.neutron [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.292600] env[68569]: DEBUG nova.compute.utils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 
tempest-ServersNegativeTestJSON-1790858041-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 919.296125] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 919.296125] env[68569]: DEBUG nova.network.neutron [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.343185] env[68569]: INFO nova.compute.manager [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Took 52.22 seconds to build instance. [ 919.368828] env[68569]: DEBUG nova.policy [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f83489e23864ab286b0d29f9f89d7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b924e47d91a488e9d912a5369792aa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.403199] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.503995] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.504637] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45f2b653-1836-4905-87b0-ee23b6f4f5ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.510861] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 919.510861] env[68569]: value = "task-3167236" [ 919.510861] env[68569]: _type = "Task" [ 919.510861] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.520901] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167236, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.623753] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 87f6c6d0-4e3d-4608-bd0d-4771bc9c174a] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 919.677622] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1380bd-1f5b-4c7f-b54c-43c52db91eb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.693617] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590dfb2e-e457-491b-a731-91c640b8a99d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.737926] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-c0211ed8-5995-48f4-b339-99bd4c93254c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.738238] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance network_info: |[{"id": "3df34aa7-452f-49b6-9a1a-0e902948e0c6", "address": "fa:16:3e:d0:31:6d", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3df34aa7-45", "ovs_interfaceid": "3df34aa7-452f-49b6-9a1a-0e902948e0c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 919.738768] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.738875] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.739037] env[68569]: DEBUG nova.network.neutron [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.740880] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:31:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3df34aa7-452f-49b6-9a1a-0e902948e0c6', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.749468] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 919.750285] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9346e61b-0414-4615-87dd-0c5c1c864f58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.756593] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 919.757371] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c7d6e0b-ec13-4ea3-b9f5-76fb223f63e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.785915] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26987726-07c3-4043-9427-e3645ca26e05 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.791674] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.791674] env[68569]: value = "task-3167237" [ 919.791674] env[68569]: _type = "Task" [ 919.791674] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.807022] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.807318] env[68569]: DEBUG nova.compute.provider_tree [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.814001] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167237, 'name': CreateVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.817667] env[68569]: DEBUG nova.network.neutron [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.838194] env[68569]: DEBUG nova.network.neutron [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.845512] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2d6faea8-1193-4d40-82f9-b0bde9442d5e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.548s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.905251] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167235, 'name': ReconfigVM_Task, 'duration_secs': 0.640508} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.905762] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 3551627b-9c90-43ea-bae7-d186eaa53c6b/3551627b-9c90-43ea-bae7-d186eaa53c6b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.906241] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fec2030-3a61-4f5f-b3e1-14266d6d926f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.913151] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 919.913151] env[68569]: value = "task-3167238" [ 919.913151] env[68569]: _type = "Task" [ 919.913151] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.921818] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167238, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.023682] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167236, 'name': PowerOffVM_Task, 'duration_secs': 0.271039} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.024198] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 920.024410] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 920.025604] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aebdc8e-2b64-470d-a248-7f86afd27161 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.036608] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 920.037428] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4d3d802-afe6-4746-a488-fecbf1cbad98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.052999] env[68569]: DEBUG nova.network.neutron [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.078694] env[68569]: DEBUG nova.network.neutron [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updating instance_info_cache with network_info: [{"id": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "address": "fa:16:3e:41:a8:0e", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd99f9970-89", "ovs_interfaceid": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.108039] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 920.108291] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 920.108623] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleting the datastore file [datastore2] 398dd3c7-c630-4a29-b204-80f6fb394ce8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 920.108695] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d820424-256a-48f0-ba8d-089edd964f5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.115392] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 920.115392] env[68569]: value = "task-3167240" [ 920.115392] env[68569]: _type = "Task" [ 920.115392] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.133918] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7693ef68-d7e5-4899-9615-9f2a1dd0bce8] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 920.138288] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.304468] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167237, 'name': CreateVM_Task, 'duration_secs': 0.401082} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.305486] env[68569]: DEBUG nova.network.neutron [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Successfully created port: d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.309457] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 920.309457] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.309457] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.309457] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 920.312579] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea674f5a-2a2c-4e96-9349-c7b53d305056 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.314972] env[68569]: DEBUG nova.scheduler.client.report [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.321785] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 920.321785] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5205bcf3-763f-7439-bee2-0c7f224d123d" [ 920.321785] env[68569]: _type = "Task" [ 920.321785] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.331955] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5205bcf3-763f-7439-bee2-0c7f224d123d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.424547] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167238, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.559022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Releasing lock "refresh_cache-559a1eff-8892-4dda-a540-4a053ae0ef2b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.559022] env[68569]: DEBUG nova.compute.manager [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 920.559022] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 920.559360] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8562ce6-56bb-4bcb-a400-9bb31a085ac4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.570025] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 920.570025] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c513c7e9-5396-4593-9212-38b3b946568f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.576566] env[68569]: DEBUG oslo_vmware.api [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 920.576566] env[68569]: value = "task-3167241" [ 920.576566] env[68569]: _type = "Task" [ 920.576566] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.581714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.582455] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Instance network_info: |[{"id": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "address": "fa:16:3e:41:a8:0e", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd99f9970-89", "ovs_interfaceid": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 920.586264] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:a8:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd99f9970-89b3-4cf5-ae87-3e05a8b03ffa', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 920.594661] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.594910] env[68569]: DEBUG oslo_vmware.api [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167241, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.595247] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 920.595576] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bc5a5c0-01af-4e4f-90df-4374edc8882a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.622944] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 920.622944] env[68569]: value = "task-3167242" [ 920.622944] env[68569]: _type = "Task" [ 920.622944] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.629602] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280426} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.630309] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.630578] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.630898] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.637747] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167242, 'name': CreateVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.643094] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: d6c45731-d76a-46cf-9b7d-be035a200948] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 920.816962] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.821260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.030s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.824473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.495s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.826800] env[68569]: INFO nova.compute.claims [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.841489] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5205bcf3-763f-7439-bee2-0c7f224d123d, 'name': SearchDatastore_Task, 'duration_secs': 0.01845} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.842599] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.843026] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 920.843655] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.843920] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.844239] env[68569]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 920.844993] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61caae45-a48c-41b6-84ea-5d1e04f2d3ba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.855255] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 920.856660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 920.860683] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.860683] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.860683] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.860916] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.861191] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 920.861191] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.861398] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.861478] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.861629] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.862154] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.862154] env[68569]: DEBUG nova.virt.hardware [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.862329] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c67f4d5-ee4d-4c7a-a72b-1047ccdd4e85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.865417] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9171d58b-00aa-41c6-aca2-8d421dfa78a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.870866] env[68569]: INFO nova.scheduler.client.report [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Deleted allocations for instance f84204a9-aeea-498e-9682-298e581b34e3 [ 920.878572] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 920.878572] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525ce271-a0bb-343f-bc4c-80e5958b5c74" [ 920.878572] env[68569]: _type = "Task" [ 920.878572] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.880569] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318d5ff1-e2ef-4b64-a5a1-f27db5d96a1b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.902183] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525ce271-a0bb-343f-bc4c-80e5958b5c74, 'name': SearchDatastore_Task, 'duration_secs': 0.017081} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.902961] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-047fe7ba-228b-4c25-9e95-96853758faf7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.910208] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 920.910208] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526f200c-987b-5203-2f01-50b588f33e58" [ 920.910208] env[68569]: _type = "Task" [ 920.910208] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.927796] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526f200c-987b-5203-2f01-50b588f33e58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.936561] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167238, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.088073] env[68569]: DEBUG oslo_vmware.api [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167241, 'name': PowerOffVM_Task, 'duration_secs': 0.236962} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.088398] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.088568] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.088998] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8aa79bd4-aeaa-4843-8f76-1155a5532abc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.114049] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.114049] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.114049] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Deleting the datastore file [datastore1] 559a1eff-8892-4dda-a540-4a053ae0ef2b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.114655] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8103f0a8-4600-4b10-acbb-5bcd5213108e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.124680] env[68569]: DEBUG oslo_vmware.api [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for the task: (returnval){ [ 921.124680] env[68569]: value = "task-3167244" [ 921.124680] env[68569]: _type = "Task" [ 921.124680] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.137268] env[68569]: DEBUG nova.compute.manager [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Received event network-changed-d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 921.137268] env[68569]: DEBUG nova.compute.manager [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Refreshing instance network info cache due to event network-changed-d99f9970-89b3-4cf5-ae87-3e05a8b03ffa. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 921.137504] env[68569]: DEBUG oslo_concurrency.lockutils [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] Acquiring lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.137661] env[68569]: DEBUG oslo_concurrency.lockutils [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] Acquired lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.137818] env[68569]: DEBUG nova.network.neutron [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Refreshing network info cache for port d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.150749] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 3ee3365b-0799-414b-b2a1-1d219bd9db96] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 921.154092] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167242, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.160245] env[68569]: DEBUG oslo_vmware.api [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167244, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.211483] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "b91304c7-e74d-402b-865b-150e0057c895" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.211778] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.211979] env[68569]: INFO nova.compute.manager [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Shelving [ 921.384346] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07caa95c-92cf-4886-b314-59033589630d tempest-ServerRescueTestJSONUnderV235-1371249363 tempest-ServerRescueTestJSONUnderV235-1371249363-project-member] Lock "f84204a9-aeea-498e-9682-298e581b34e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.373s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.428230] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526f200c-987b-5203-2f01-50b588f33e58, 'name': SearchDatastore_Task, 'duration_secs': 0.014625} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.428568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.428836] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 921.429949] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2faefe10-35c5-4ec2-bcad-08d17f46560e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.435140] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167238, 'name': Rename_Task, 'duration_secs': 1.201031} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.435845] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.436719] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9aa4e199-555d-4353-b85d-fac9e7481d84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.440264] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 921.440264] env[68569]: value = "task-3167245" [ 921.440264] env[68569]: _type = "Task" [ 921.440264] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.445117] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 921.445117] env[68569]: value = "task-3167246" [ 921.445117] env[68569]: _type = "Task" [ 921.445117] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.452152] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.457389] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167246, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.645815] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167242, 'name': CreateVM_Task, 'duration_secs': 0.565365} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.650998] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 921.653349] env[68569]: DEBUG oslo_vmware.api [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Task: {'id': task-3167244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130214} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.654492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.654946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.655969] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 921.657023] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 921.657023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Deleted contents of the VM from 
datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 921.657023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 921.657023] env[68569]: INFO nova.compute.manager [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Took 1.10 seconds to destroy the instance on the hypervisor. [ 921.657454] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.663984] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ce0e37-898b-454e-83b1-4d0a1fce5dc7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.665097] env[68569]: DEBUG nova.compute.manager [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 921.665238] env[68569]: DEBUG nova.network.neutron [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 921.668793] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 5fe0475b-96cd-4b9a-8fd4-fe3d4e7f3155] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 921.675220] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 921.675220] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d9f11d-5d32-0cb3-f945-18dcfd2ea2b0" [ 921.675220] env[68569]: _type = "Task" [ 921.675220] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.685153] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d9f11d-5d32-0cb3-f945-18dcfd2ea2b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.694100] env[68569]: DEBUG nova.network.neutron [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.699025] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.699306] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.699500] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.699713] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.699896] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.700196] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.700495] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.700921] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.701170] env[68569]: DEBUG nova.virt.hardware [None 
req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.701577] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.701577] env[68569]: DEBUG nova.virt.hardware [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.702595] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db0247c-2dcb-49b9-93a1-a95a4f11526e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.717752] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45ef23c-742b-457d-b2bf-84a4e2698f0a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.740770] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:8c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d1e0d6d-0c37-425c-a138-7bc79f96a3a2', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.749268] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.750068] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.751891] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.751891] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86f72c25-d1ee-45d6-b976-579c66b6bd87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.752572] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68d86bcd-f60f-44a7-8e9e-40520f9c0d26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.776820] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 921.776820] env[68569]: value = "task-3167247" [ 921.776820] env[68569]: _type = "Task" [ 921.776820] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.776953] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.776953] env[68569]: value = "task-3167248" [ 921.776953] env[68569]: _type = "Task" [ 921.776953] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.789527] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.792938] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167248, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.964881] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167245, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.973039] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167246, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.008286] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.008639] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.008840] env[68569]: DEBUG nova.compute.manager [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.009940] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76cabed-a585-4132-ac2e-6cc1d19d309c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.021385] env[68569]: DEBUG nova.compute.manager [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 922.022128] env[68569]: DEBUG nova.objects.instance [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'flavor' on Instance uuid a9e87dfc-6e00-4e55-8a8f-bc3174b991da {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.057415] env[68569]: DEBUG nova.network.neutron [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updated VIF entry in instance network info cache for port d99f9970-89b3-4cf5-ae87-3e05a8b03ffa. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.058080] env[68569]: DEBUG nova.network.neutron [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updating instance_info_cache with network_info: [{"id": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "address": "fa:16:3e:41:a8:0e", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd99f9970-89", "ovs_interfaceid": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.175709] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 2c47ef2c-873a-4cb3-9a36-aa2155911b6e] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 922.189731] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d9f11d-5d32-0cb3-f945-18dcfd2ea2b0, 'name': SearchDatastore_Task, 'duration_secs': 0.013444} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.189731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.189731] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.189731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.190201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.190201] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.193254] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41203ca4-2eb0-41be-94e7-a7ffceb8ed14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.195607] env[68569]: DEBUG nova.network.neutron [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.205816] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.205816] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.206217] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d796572-3c7e-451e-a035-ccf49380e648 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.216924] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 922.216924] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529ef65c-0906-0ce5-15fe-e43dfca3f523" [ 922.216924] env[68569]: _type = "Task" [ 922.216924] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.226934] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529ef65c-0906-0ce5-15fe-e43dfca3f523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.269255] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3ec769-c477-436c-8e26-932cf85097cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.277863] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d897e01-c391-477c-98ec-1b0bd3eaf539 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.296473] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167247, 'name': PowerOffVM_Task, 'duration_secs': 0.257835} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.323017] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.323017] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167248, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.323689] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c7690d-9c9e-4601-9a74-f521e6a04aaa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.326821] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa333ec-1c19-4e3d-835d-2c07e6fcd06c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.348752] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46151d2-6abd-4491-a856-574f1a57d082 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.354289] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4078048-4de2-40fb-8b3f-5e87fb415a1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.367938] env[68569]: DEBUG nova.compute.provider_tree [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.453014] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.755487} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.456431] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 922.456681] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 922.456944] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ae70548-004b-4ac6-b672-ead101ef6918 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.464440] env[68569]: DEBUG oslo_vmware.api [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167246, 'name': PowerOnVM_Task, 'duration_secs': 0.698824} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.465702] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.465920] env[68569]: INFO nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Took 9.22 seconds to spawn the instance on the hypervisor. [ 922.466141] env[68569]: DEBUG nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.466437] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 922.466437] env[68569]: value = "task-3167249" [ 922.466437] env[68569]: _type = "Task" [ 922.466437] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.467140] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539a6160-d431-4137-b6bc-eb4627af7bbb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.470437] env[68569]: DEBUG nova.network.neutron [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Successfully updated port: d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.490020] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167249, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.569842] env[68569]: DEBUG oslo_concurrency.lockutils [req-8f587424-71c0-4e9f-9ed5-79be4f063fce req-37fa46c9-23b4-44ac-a20e-cb4ca805291d service nova] Releasing lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.689831] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.690133] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Cleaning up deleted instances with incomplete migration {{(pid=68569) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11879}} [ 922.699493] env[68569]: INFO nova.compute.manager [-] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Took 1.03 seconds to deallocate network for instance. [ 922.731363] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529ef65c-0906-0ce5-15fe-e43dfca3f523, 'name': SearchDatastore_Task, 'duration_secs': 0.0133} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.732396] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b5a2da3-6507-4d27-a6c1-d2e8debad8f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.739050] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 922.739050] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c1aa6-66ea-7bf4-e69e-b7049780a690" [ 922.739050] env[68569]: _type = "Task" [ 922.739050] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.747450] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c1aa6-66ea-7bf4-e69e-b7049780a690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.794974] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167248, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.874878] env[68569]: DEBUG nova.scheduler.client.report [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.879247] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 922.880157] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ebbbf5d8-91a2-470b-aeaf-e4f67df2ec2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.888073] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 922.888073] env[68569]: value = "task-3167250" [ 922.888073] env[68569]: _type = "Task" [ 922.888073] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.896460] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167250, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.976637] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.976865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.977086] env[68569]: DEBUG nova.network.neutron [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.981582] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167249, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066001} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.982145] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.983225] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632fa48c-bab1-486f-9afd-53acea329235 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.012713] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 923.015287] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38d2fd62-ce61-4369-b661-ce47284a34a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.032583] env[68569]: INFO nova.compute.manager [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Took 51.56 seconds to build instance. 
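The task entries above and below (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task, CreateVM_Task, CreateSnapshot_Task) all follow the same oslo.vmware pattern: the driver invokes a vSphere task method, gets back a task reference, and wait_for_task polls it until the task reports 'success' or 'error' -- which is what the repeated "Waiting for the task ... to complete" and "progress is N%" lines record. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a VM managed-object reference vm_ref (both hypothetical names here, not taken from the log, and not Nova's actual vmwareapi driver code):

    # Illustrative sketch of the invoke-and-poll pattern traced in the log.
    from oslo_vmware import exceptions as vmware_exceptions

    def power_on_and_wait(session, vm_ref):
        # invoke_api() issues the SOAP call -- the "Invoking
        # VirtualMachine.PowerOnVM_Task with opID=..." lines -- and returns
        # a task managed-object reference.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

        # wait_for_task() polls the task (the "Waiting for the task ... to
        # complete" and "progress is N%" lines) and returns the task info
        # once the task reaches the 'success' state; it raises an
        # oslo.vmware exception if the task ends in the 'error' state.
        try:
            return session.wait_for_task(task_ref)
        except vmware_exceptions.VMwareDriverException:
            # Task failed on the vCenter side; callers typically log and re-raise.
            raise

The lock messages interleaved with the task polling ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held N.NNNs") come from oslo_concurrency.lockutils: operations such as shelve, stop and terminate run inside lockutils.synchronized(...) / lockutils.lock(...) sections keyed on the instance UUID or on the image-cache path, which is why the terminate above reports holding its instance lock for 32.373s while other requests wait.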
[ 923.033954] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.034050] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6af8687-7726-49dd-a964-4338a40f16b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.044232] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 923.044232] env[68569]: value = "task-3167251" [ 923.044232] env[68569]: _type = "Task" [ 923.044232] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.045716] env[68569]: DEBUG oslo_vmware.api [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 923.045716] env[68569]: value = "task-3167252" [ 923.045716] env[68569]: _type = "Task" [ 923.045716] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.060741] env[68569]: DEBUG oslo_vmware.api [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167252, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.061038] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167251, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.170407] env[68569]: DEBUG nova.compute.manager [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-vif-plugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 923.171099] env[68569]: DEBUG oslo_concurrency.lockutils [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.171532] env[68569]: DEBUG oslo_concurrency.lockutils [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.172752] env[68569]: DEBUG oslo_concurrency.lockutils [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.172752] env[68569]: DEBUG nova.compute.manager [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] No waiting events found dispatching network-vif-plugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 923.173051] env[68569]: WARNING nova.compute.manager [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received unexpected event network-vif-plugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 for instance with vm_state building and task_state spawning. [ 923.173331] env[68569]: DEBUG nova.compute.manager [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-changed-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 923.173653] env[68569]: DEBUG nova.compute.manager [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Refreshing instance network info cache due to event network-changed-d13b7716-5b05-4896-9da9-e3674d55a3a0. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 923.173891] env[68569]: DEBUG oslo_concurrency.lockutils [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.194244] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.208030] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.250486] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c1aa6-66ea-7bf4-e69e-b7049780a690, 'name': SearchDatastore_Task, 'duration_secs': 0.009037} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.250486] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.250756] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/7696390d-a097-4b6d-827d-92f3165a4188.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.250857] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da394420-be4c-468b-a83e-1cb260c30d54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.258836] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 923.258836] env[68569]: value = "task-3167253" [ 923.258836] env[68569]: _type = "Task" [ 923.258836] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.269428] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.293859] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167248, 'name': CreateVM_Task, 'duration_secs': 1.457147} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.293998] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.294868] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.295052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.295393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 923.295815] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5242e77-4b5f-4560-8c65-779a3cb00ba1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.301065] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 923.301065] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e88c76-709e-b105-6db8-d4f17fc03a25" [ 923.301065] env[68569]: _type = "Task" [ 923.301065] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.311284] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e88c76-709e-b105-6db8-d4f17fc03a25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.381583] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.382889] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 923.387627] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.175s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.387627] env[68569]: DEBUG nova.objects.instance [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lazy-loading 'resources' on Instance uuid 0943cfd5-33fb-4b02-9e4d-93f18385bdae {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.404271] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167250, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.528014] env[68569]: DEBUG nova.network.neutron [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.535580] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1810342-f35d-4756-ad83-273d19a490bc tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.894s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.561962] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167251, 'name': ReconfigVM_Task, 'duration_secs': 0.302705} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.567024] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Reconfigured VM instance instance-00000049 to attach disk [datastore1] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 923.567024] env[68569]: DEBUG oslo_vmware.api [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167252, 'name': PowerOffVM_Task, 'duration_secs': 0.239655} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.567024] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-231b1be2-767a-4afb-b109-5bfedd61fa36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.568422] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.568422] env[68569]: DEBUG nova.compute.manager [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.570299] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e2a0d6-51c1-4202-9357-a0bdc91b27bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.581708] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 923.581708] env[68569]: value = "task-3167254" [ 923.581708] env[68569]: _type = "Task" [ 923.581708] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.596387] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167254, 'name': Rename_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.737784] env[68569]: DEBUG nova.network.neutron [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.771739] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167253, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.821050] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e88c76-709e-b105-6db8-d4f17fc03a25, 'name': SearchDatastore_Task, 'duration_secs': 0.015126} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.821388] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.821631] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.821873] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.822489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.822819] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.823407] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f5c47b4-16ce-4ff6-8198-42e57f47ca95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.836013] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.836230] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.837111] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c0fa0e5-86a4-4c75-90fd-dc003afe65e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.845032] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 923.845032] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525e5e43-13ad-3936-6985-780ce87d3ed2" [ 923.845032] env[68569]: _type = "Task" [ 923.845032] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.854393] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525e5e43-13ad-3936-6985-780ce87d3ed2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.889697] env[68569]: DEBUG nova.compute.utils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 923.894817] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 923.894989] env[68569]: DEBUG nova.network.neutron [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 923.910219] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167250, 'name': CreateSnapshot_Task, 'duration_secs': 0.895681} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.910639] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 923.911425] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f717e5-8328-4a6e-9faf-e914d92d9fbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.960186] env[68569]: DEBUG nova.policy [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '235d797ca5124e9e93003529681b968a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f81f435f5b644ab9c0b6bc809a0a9e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 924.094113] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e4b32342-fc13-4cce-ab73-df3efe928560 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.084s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.102900] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167254, 'name': Rename_Task, 'duration_secs': 0.288351} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.103526] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.103956] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c36df43d-9a11-4c2e-bf50-4b35f6c6a10c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.116453] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 924.116453] env[68569]: value = "task-3167255" [ 924.116453] env[68569]: _type = "Task" [ 924.116453] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.126205] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.243012] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.243754] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance network_info: |[{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 924.244186] env[68569]: DEBUG oslo_concurrency.lockutils [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.244496] env[68569]: DEBUG nova.network.neutron [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Refreshing network info cache for port d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.246709] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:61:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'd13b7716-5b05-4896-9da9-e3674d55a3a0', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.256992] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating folder: Project (8b924e47d91a488e9d912a5369792aa5). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.261454] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c85a2802-7e09-45c3-8fc6-58fe987a37ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.264611] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d725d169-77be-4c1b-bfbd-d667b1a7af8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.279217] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec28876-6b9c-4b39-ba49-e9d1b9edb9ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.283526] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599499} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.285089] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/7696390d-a097-4b6d-827d-92f3165a4188.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.285372] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.285825] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Created folder: Project (8b924e47d91a488e9d912a5369792aa5) in parent group-v633430. [ 924.286038] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating folder: Instances. Parent ref: group-v633640. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.286681] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0127f49e-293a-43bd-bef1-bfe6e4a1484d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.288958] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-604ef678-966c-4c01-a100-a492a56988ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.325708] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "3551627b-9c90-43ea-bae7-d186eaa53c6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.326057] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.326317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "3551627b-9c90-43ea-bae7-d186eaa53c6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.326527] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.326745] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.331060] env[68569]: DEBUG nova.network.neutron [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Successfully created port: 9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 924.335023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-aa1124b2-0afe-46ae-bc21-e96260e718bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.338046] env[68569]: INFO nova.compute.manager [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Terminating instance [ 924.341570] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 924.341570] env[68569]: value = "task-3167257" [ 924.341570] env[68569]: _type = "Task" [ 924.341570] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.349250] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Created folder: Instances in parent group-v633640. [ 924.349250] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.352648] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.357349] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae14f812-da75-4504-9348-bf28066fa2ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.373595] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9389a9eb-00b6-4800-8e9f-23b025d0f1c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.382032] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167257, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.388491] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525e5e43-13ad-3936-6985-780ce87d3ed2, 'name': SearchDatastore_Task, 'duration_secs': 0.018547} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.398566] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.398566] env[68569]: value = "task-3167259" [ 924.398566] env[68569]: _type = "Task" [ 924.398566] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.399490] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 924.405375] env[68569]: DEBUG nova.compute.provider_tree [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.405375] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed78b31c-25a8-414a-9b1b-6ff9787a2b69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.417573] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167259, 'name': CreateVM_Task} progress is 15%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.421634] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 924.421634] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523ba5f5-3109-8692-6ce4-37160710b61b" [ 924.421634] env[68569]: _type = "Task" [ 924.421634] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.436252] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 924.441179] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-872afc1f-1c85-4515-b9cb-76b68f6d8b94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.445184] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523ba5f5-3109-8692-6ce4-37160710b61b, 'name': SearchDatastore_Task, 'duration_secs': 0.012042} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.445559] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.445993] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.446712] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3bdf9a8-9e2f-4eaf-ac58-be09ec1c197e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.450827] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 924.450827] env[68569]: value = "task-3167260" [ 924.450827] env[68569]: _type = "Task" [ 924.450827] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.455974] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 924.455974] env[68569]: value = "task-3167261" [ 924.455974] env[68569]: _type = "Task" [ 924.455974] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.463260] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167260, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.473679] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167261, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.508259] env[68569]: DEBUG nova.objects.instance [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'flavor' on Instance uuid a9e87dfc-6e00-4e55-8a8f-bc3174b991da {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.629288] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167255, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.685348] env[68569]: DEBUG nova.network.neutron [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updated VIF entry in instance network info cache for port d13b7716-5b05-4896-9da9-e3674d55a3a0. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 924.685594] env[68569]: DEBUG nova.network.neutron [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.856017] env[68569]: DEBUG nova.compute.manager [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.856384] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.856614] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072529} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.857435] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26125207-a362-4bd9-b6c4-5ae140ec9c4b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.860396] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.861254] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12398ea4-d8d0-497a-a265-c1fda6db6614 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.887994] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/7696390d-a097-4b6d-827d-92f3165a4188.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.890538] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a557658-57f4-450c-b357-4c577e71af5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.905517] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.906117] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24c297cd-7355-4628-83bc-d1af26451f4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.908742] env[68569]: DEBUG nova.scheduler.client.report [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.921601] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 924.921601] env[68569]: value = "task-3167262" [ 924.921601] env[68569]: _type = "Task" [ 924.921601] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.926685] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 924.926685] env[68569]: value = "task-3167263" [ 924.926685] env[68569]: _type = "Task" [ 924.926685] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.937947] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167259, 'name': CreateVM_Task, 'duration_secs': 0.475366} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.938226] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167262, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.938975] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.940321] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.940321] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.940321] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 924.940552] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4b94f08-e8de-4165-836d-dc94cf158e67 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.945850] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.951736] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 924.951736] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2785a-c118-07f8-26f9-de820eeaee3c" [ 924.951736] env[68569]: _type = "Task" [ 924.951736] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.968724] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167260, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.975326] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2785a-c118-07f8-26f9-de820eeaee3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.975544] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167261, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.014558] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.014788] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.014974] env[68569]: DEBUG nova.network.neutron [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.018021] env[68569]: DEBUG nova.objects.instance [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'info_cache' on Instance uuid a9e87dfc-6e00-4e55-8a8f-bc3174b991da {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.129293] env[68569]: DEBUG oslo_vmware.api [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167255, 'name': PowerOnVM_Task, 'duration_secs': 0.538564} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.129599] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.129790] env[68569]: INFO nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 925.130095] env[68569]: DEBUG nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 925.130778] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13d4fa4-4305-4eff-908d-cfac40c78346 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.188780] env[68569]: DEBUG oslo_concurrency.lockutils [req-57a9e055-40ed-4be1-91cd-36a8f750b579 req-1f0d132e-62e1-43cc-b823-6ef5a74ae1dd service nova] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.422902] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.427266] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 925.430919] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.164s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.430919] env[68569]: DEBUG nova.objects.instance [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 925.445937] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167263, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.450122] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167262, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.460101] env[68569]: INFO nova.scheduler.client.report [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Deleted allocations for instance 0943cfd5-33fb-4b02-9e4d-93f18385bdae [ 925.475672] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167260, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.482114] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2785a-c118-07f8-26f9-de820eeaee3c, 'name': SearchDatastore_Task, 'duration_secs': 0.022189} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.484833] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 925.485079] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.485235] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 925.486075] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.486333] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Image pref 0:0:0 {{(pid=68569) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 925.486689] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 925.486999] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 925.487250] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 925.487455] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 925.487775] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 925.488131] env[68569]: DEBUG nova.virt.hardware [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 925.489942] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.489942] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.489942] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.489942] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.490145] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.491133] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f100a8-98e8-4052-a059-14224dacaf54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.498840] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5b2f7e2-1ad1-4766-8b64-2628d72e6f3b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.502590] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531771} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.503413] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.503744] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.505134] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20cd1289-2342-4525-870a-242cf91b4162 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.522024] env[68569]: DEBUG nova.objects.base [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 925.529714] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 925.529714] env[68569]: 
value = "task-3167264" [ 925.529714] env[68569]: _type = "Task" [ 925.529714] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.532660] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9990ed-b9cc-4a7a-a6a6-383610929da7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.539532] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.539532] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.543123] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a76e69-3192-44e0-bcee-7745bdf2289b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.564438] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 925.564438] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5235575c-aa2a-1b2f-cd53-4a2362e6b9e8" [ 925.564438] env[68569]: _type = "Task" [ 925.564438] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.564438] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.575173] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5235575c-aa2a-1b2f-cd53-4a2362e6b9e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011831} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.576051] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d427756b-fd8e-4741-8128-f74e0608e7c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.582869] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 925.582869] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ceefb0-baf0-6fed-9b0a-c631759de7a5" [ 925.582869] env[68569]: _type = "Task" [ 925.582869] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.593946] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ceefb0-baf0-6fed-9b0a-c631759de7a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.649512] env[68569]: INFO nova.compute.manager [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Took 49.15 seconds to build instance. [ 925.949559] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167263, 'name': PowerOffVM_Task, 'duration_secs': 0.575013} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.949559] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167262, 'name': ReconfigVM_Task, 'duration_secs': 0.828424} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.950098] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.950098] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.952026] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/7696390d-a097-4b6d-827d-92f3165a4188.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.952026] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7c37006-a5af-487d-9215-36f4c9fd84f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.952409] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04b49f84-bd97-47cc-8146-40375806a8ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.964957] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b 
tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167260, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.966402] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 925.966402] env[68569]: value = "task-3167266" [ 925.966402] env[68569]: _type = "Task" [ 925.966402] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.979101] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167266, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.979570] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b3de90-d781-49c5-ae93-ea891eb8cda3 tempest-ServerAddressesNegativeTestJSON-3836094 tempest-ServerAddressesNegativeTestJSON-3836094-project-member] Lock "0943cfd5-33fb-4b02-9e4d-93f18385bdae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.228s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.049809] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071443} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.051494] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 926.051974] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.052879] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.053176] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleting the datastore file [datastore2] 3551627b-9c90-43ea-bae7-d186eaa53c6b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.054079] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa24cac-6516-4ca9-bc19-4cdb2cf3986c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.056912] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91391a19-1719-4c35-b2b2-7175cd3005b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.083482] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.087606] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ebb3cda-dbbe-44b9-ae07-0bae6c44ed8f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.103498] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for the task: (returnval){ [ 926.103498] env[68569]: value = "task-3167267" [ 926.103498] env[68569]: _type = "Task" [ 926.103498] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.114499] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 926.114499] env[68569]: value = "task-3167268" [ 926.114499] env[68569]: _type = "Task" [ 926.114499] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.118414] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.126162] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ceefb0-baf0-6fed-9b0a-c631759de7a5, 'name': SearchDatastore_Task, 'duration_secs': 0.011536} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.127298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.127298] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.127907] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c84b70fc-7faa-4c09-8f9c-5c8e4753b129 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.134394] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167268, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.140111] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 926.140111] env[68569]: value = "task-3167269" [ 926.140111] env[68569]: _type = "Task" [ 926.140111] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.151366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4aa245f1-5958-4106-b376-58c98bb93a6f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.220s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.151699] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167269, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.451461] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5411de94-119b-4746-ae0c-cb2b08933af2 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.455472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.247s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.455913] env[68569]: DEBUG nova.objects.instance [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lazy-loading 'resources' on Instance uuid 559a1eff-8892-4dda-a540-4a053ae0ef2b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.478889] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167260, 'name': CloneVM_Task, 'duration_secs': 1.810559} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.479772] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Created linked-clone VM from snapshot [ 926.481242] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4459b1-ede6-43e6-9f82-6d32e3ffa195 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.488532] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167266, 'name': Rename_Task, 'duration_secs': 0.392354} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.489338] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.489693] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a5a6f6f-69a8-4a81-b921-4859117fe3e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.495064] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Uploading image 53cd4603-1b59-4c8d-a074-19290ed071a6 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 926.507048] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 926.507048] env[68569]: value = "task-3167270" [ 926.507048] env[68569]: _type = "Task" [ 926.507048] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.526115] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 926.526115] env[68569]: value = "vm-633643" [ 926.526115] env[68569]: _type = "VirtualMachine" [ 926.526115] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 926.527070] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-61cd3169-82e7-49d2-9e48-bb4e118360bc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.542682] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lease: (returnval){ [ 926.542682] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52262aec-7344-9109-6c9d-e789c7916e0a" [ 926.542682] env[68569]: _type = "HttpNfcLease" [ 926.542682] env[68569]: } obtained for exporting VM: (result){ [ 926.542682] env[68569]: value = "vm-633643" [ 926.542682] env[68569]: _type = "VirtualMachine" [ 926.542682] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 926.543138] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the lease: (returnval){ [ 926.543138] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52262aec-7344-9109-6c9d-e789c7916e0a" [ 926.543138] env[68569]: _type = "HttpNfcLease" [ 926.543138] env[68569]: } to be ready. 
{{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 926.557881] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 926.557881] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52262aec-7344-9109-6c9d-e789c7916e0a" [ 926.557881] env[68569]: _type = "HttpNfcLease" [ 926.557881] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 926.559783] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 926.559783] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52262aec-7344-9109-6c9d-e789c7916e0a" [ 926.559783] env[68569]: _type = "HttpNfcLease" [ 926.559783] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 926.560685] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3707679-74df-4a6d-9b10-d0dd9b034831 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.575373] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5523f-80ee-b676-38b4-4af20312cb6b/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 926.575373] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5523f-80ee-b676-38b4-4af20312cb6b/disk-0.vmdk for reading. 
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 926.575475] env[68569]: DEBUG nova.network.neutron [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.650944] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.679771] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167269, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.681023] env[68569]: DEBUG oslo_vmware.api [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Task: {'id': task-3167267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171091} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.684181] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.684775] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.684775] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.684775] env[68569]: INFO nova.compute.manager [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Took 1.83 seconds to destroy the instance on the hypervisor. [ 926.684990] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.685587] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167268, 'name': ReconfigVM_Task, 'duration_secs': 0.366582} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.685856] env[68569]: DEBUG nova.compute.manager [-] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.685949] env[68569]: DEBUG nova.network.neutron [-] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.687933] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.689095] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-184329ea-5a49-4e84-8b99-b391aacdfa66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.697503] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 926.697503] env[68569]: value = "task-3167272" [ 926.697503] env[68569]: _type = "Task" [ 926.697503] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.705060] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ae26275a-89c7-4e4b-8628-3b64a7f33bc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.712961] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167272, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.019881] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167270, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.161682] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546732} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.162344] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.162966] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.162966] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22d534d1-bd6e-49eb-931a-38ec7f3cfa4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.168044] env[68569]: INFO nova.compute.manager [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Rebuilding instance [ 927.173945] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.176300] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffec4c10-b4a7-4ad3-97e9-7d490028530f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.179062] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 927.179062] env[68569]: value = "task-3167273" [ 927.179062] env[68569]: _type = "Task" [ 927.179062] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.188257] env[68569]: DEBUG oslo_vmware.api [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 927.188257] env[68569]: value = "task-3167274" [ 927.188257] env[68569]: _type = "Task" [ 927.188257] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.191611] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167273, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.210244] env[68569]: DEBUG oslo_vmware.api [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.215819] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167272, 'name': Rename_Task, 'duration_secs': 0.174273} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.224441] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.228079] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f308f61e-38da-4684-bebc-4b315dd79d11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.237151] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 927.237151] env[68569]: value = "task-3167275" [ 927.237151] env[68569]: _type = "Task" [ 927.237151] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.247477] env[68569]: DEBUG nova.compute.manager [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 927.248587] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7637c6-e793-4943-9cd3-30664aa5236b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.259335] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167275, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.307581] env[68569]: DEBUG nova.compute.manager [req-5ab2efab-25b4-4474-a69b-9fca0961237d req-d249ca2f-c348-4138-89e0-2587eee65353 service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Received event network-vif-plugged-9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 927.308193] env[68569]: DEBUG oslo_concurrency.lockutils [req-5ab2efab-25b4-4474-a69b-9fca0961237d req-d249ca2f-c348-4138-89e0-2587eee65353 service nova] Acquiring lock "2cf8803a-8078-4832-a736-330d6bcde6ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.308444] env[68569]: DEBUG oslo_concurrency.lockutils [req-5ab2efab-25b4-4474-a69b-9fca0961237d req-d249ca2f-c348-4138-89e0-2587eee65353 service nova] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.308679] env[68569]: DEBUG oslo_concurrency.lockutils [req-5ab2efab-25b4-4474-a69b-9fca0961237d req-d249ca2f-c348-4138-89e0-2587eee65353 service nova] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.310259] env[68569]: DEBUG nova.compute.manager [req-5ab2efab-25b4-4474-a69b-9fca0961237d req-d249ca2f-c348-4138-89e0-2587eee65353 service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] No waiting events found dispatching network-vif-plugged-9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 927.310259] env[68569]: WARNING nova.compute.manager [req-5ab2efab-25b4-4474-a69b-9fca0961237d req-d249ca2f-c348-4138-89e0-2587eee65353 service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Received unexpected event network-vif-plugged-9ff21d1d-5195-4832-bc11-4f0170d636b7 for instance with vm_state building and task_state spawning. 
[ 927.338149] env[68569]: DEBUG nova.compute.manager [req-033f074e-5695-4fc4-8922-b5853addab59 req-b77db7b1-1f2d-428d-b7d3-982ea20a2e3a service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Received event network-vif-deleted-67d7d837-0e5c-4bba-969a-779af4075541 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 927.338749] env[68569]: INFO nova.compute.manager [req-033f074e-5695-4fc4-8922-b5853addab59 req-b77db7b1-1f2d-428d-b7d3-982ea20a2e3a service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Neutron deleted interface 67d7d837-0e5c-4bba-969a-779af4075541; detaching it from the instance and deleting it from the info cache [ 927.339131] env[68569]: DEBUG nova.network.neutron [req-033f074e-5695-4fc4-8922-b5853addab59 req-b77db7b1-1f2d-428d-b7d3-982ea20a2e3a service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.498180] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d488d1-991c-4e1f-94f6-7af5711d0bcc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.516070] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc831fdf-6d71-4e93-b08f-edc19b85c6e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.557713] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167270, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.558825] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d72d99-1ba4-46bc-b262-cba3a71a30df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.569368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cee509-3bdf-446b-814f-bb5756629aac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.586173] env[68569]: DEBUG nova.compute.provider_tree [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.589137] env[68569]: DEBUG nova.network.neutron [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Successfully updated port: 9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.618967] env[68569]: DEBUG nova.network.neutron [-] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.693820] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115852} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.697621] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.698630] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445833a2-e51b-45f9-9a94-23bafc7cb45d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.709659] env[68569]: DEBUG oslo_vmware.api [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167274, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.733148] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.733539] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e41f664-e280-4b9d-801a-8bf83d719d16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.763358] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167275, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.765976] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 927.765976] env[68569]: value = "task-3167276" [ 927.765976] env[68569]: _type = "Task" [ 927.765976] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.779750] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167276, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.846838] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25cdca86-9a7b-46f3-bf0b-f45a53ab3a2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.863895] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b18f7f-c6d8-4598-bc67-1bdf52490836 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.920802] env[68569]: DEBUG nova.compute.manager [req-033f074e-5695-4fc4-8922-b5853addab59 req-b77db7b1-1f2d-428d-b7d3-982ea20a2e3a service nova] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Detach interface failed, port_id=67d7d837-0e5c-4bba-969a-779af4075541, reason: Instance 3551627b-9c90-43ea-bae7-d186eaa53c6b could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 928.030035] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167270, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.090029] env[68569]: DEBUG nova.scheduler.client.report [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.095473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "refresh_cache-2cf8803a-8078-4832-a736-330d6bcde6ce" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.095638] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquired lock "refresh_cache-2cf8803a-8078-4832-a736-330d6bcde6ce" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.095834] env[68569]: DEBUG nova.network.neutron [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.122128] env[68569]: INFO nova.compute.manager [-] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Took 1.44 seconds to deallocate network for instance. [ 928.210588] env[68569]: DEBUG oslo_vmware.api [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167274, 'name': PowerOnVM_Task, 'duration_secs': 0.527121} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.210588] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.210588] env[68569]: DEBUG nova.compute.manager [None req-1a2c4456-6086-4548-a807-bc9255e46a58 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.210588] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52d315e-5ecd-4bd1-89e8-ff8d9f893316 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.261962] env[68569]: DEBUG oslo_vmware.api [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167275, 'name': PowerOnVM_Task, 'duration_secs': 0.650069} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.263500] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.263731] env[68569]: DEBUG nova.compute.manager [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.264723] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d24ba3a-de48-4b14-b5dc-426e8260cafc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.286873] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.287412] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167276, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.288361] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec7ca816-1828-45a5-add6-f763ceee439c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.299499] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 928.299499] env[68569]: value = "task-3167277" [ 928.299499] env[68569]: _type = "Task" [ 928.299499] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.309748] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.530425] env[68569]: DEBUG oslo_vmware.api [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167270, 'name': PowerOnVM_Task, 'duration_secs': 1.529184} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.530963] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.531331] env[68569]: INFO nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Took 10.30 seconds to spawn the instance on the hypervisor. 
[ 928.531629] env[68569]: DEBUG nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.532879] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb43d42-65cd-4e06-b7bb-acbf8521a6c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.601982] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.633970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.635808] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.635808] env[68569]: DEBUG nova.objects.instance [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lazy-loading 'resources' on Instance uuid 3551627b-9c90-43ea-bae7-d186eaa53c6b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.637882] env[68569]: INFO nova.scheduler.client.report [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Deleted allocations for instance 559a1eff-8892-4dda-a540-4a053ae0ef2b [ 928.680400] env[68569]: DEBUG nova.network.neutron [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.782413] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167276, 'name': ReconfigVM_Task, 'duration_secs': 0.52634} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.783548] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Reconfigured VM instance instance-0000004b to attach disk [datastore1] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.783548] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16a19ee8-5019-4a85-9b64-c9bb7664ce00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.801339] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 928.801339] env[68569]: value = "task-3167278" [ 928.801339] env[68569]: _type = "Task" [ 928.801339] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.823020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.834946] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167277, 'name': PowerOffVM_Task, 'duration_secs': 0.303562} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.835665] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167278, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.835964] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.836242] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.837235] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ac9620-bc08-4a6f-a4bd-e6fce06820d3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.851517] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 928.851802] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-593ffdf9-b740-4cb2-adf3-af9e265ab647 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.941400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 928.941400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 928.941400] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore1] c0211ed8-5995-48f4-b339-99bd4c93254c {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.944622] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98b79db8-6bf0-4bab-b08c-bd5d3a87abaa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.954387] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 928.954387] env[68569]: value = "task-3167280" [ 928.954387] env[68569]: _type = "Task" [ 928.954387] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.965221] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.023596] env[68569]: DEBUG nova.network.neutron [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Updating instance_info_cache with network_info: [{"id": "9ff21d1d-5195-4832-bc11-4f0170d636b7", "address": "fa:16:3e:e9:2f:38", "network": {"id": "5b49b36a-77a1-42bf-9c77-f266776a0158", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1981144562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f81f435f5b644ab9c0b6bc809a0a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff21d1d-51", "ovs_interfaceid": "9ff21d1d-5195-4832-bc11-4f0170d636b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.065436] env[68569]: INFO nova.compute.manager [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Took 50.00 seconds to build instance. [ 929.151084] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e34180b7-b916-45a2-b52c-745354e4ee01 tempest-ServersListShow2100Test-1238337676 tempest-ServersListShow2100Test-1238337676-project-member] Lock "559a1eff-8892-4dda-a540-4a053ae0ef2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.401s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.323478] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167278, 'name': Rename_Task, 'duration_secs': 0.244282} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.324282] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.328900] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd58162f-a0f0-48a2-bd0c-f79dfb8cea09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.337231] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 929.337231] env[68569]: value = "task-3167281" [ 929.337231] env[68569]: _type = "Task" [ 929.337231] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.346943] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167281, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.470582] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34865} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.470874] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.471172] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.471349] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.529227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Releasing lock "refresh_cache-2cf8803a-8078-4832-a736-330d6bcde6ce" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.530687] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Instance network_info: |[{"id": "9ff21d1d-5195-4832-bc11-4f0170d636b7", "address": "fa:16:3e:e9:2f:38", "network": {"id": "5b49b36a-77a1-42bf-9c77-f266776a0158", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1981144562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f81f435f5b644ab9c0b6bc809a0a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff21d1d-51", "ovs_interfaceid": "9ff21d1d-5195-4832-bc11-4f0170d636b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 929.530885] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:2f:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'dad4f433-bb0b-45c7-8040-972ef2277f75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ff21d1d-5195-4832-bc11-4f0170d636b7', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.543367] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Creating folder: Project (0f81f435f5b644ab9c0b6bc809a0a9e0). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.549883] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e59ac73-1ffc-4e2a-88ae-aaf1613e743c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.556237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.556545] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.569488] env[68569]: DEBUG oslo_concurrency.lockutils [None req-fa4068f2-91c9-41a0-8ce1-d7e74e9a1949 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "7696390d-a097-4b6d-827d-92f3165a4188" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.975s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.570170] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Created folder: Project (0f81f435f5b644ab9c0b6bc809a0a9e0) in parent group-v633430. [ 929.570361] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Creating folder: Instances. Parent ref: group-v633644. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.570640] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-544ac509-0609-4d7b-965f-271d770ca8ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.586214] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Created folder: Instances in parent group-v633644. 
[ 929.586608] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 929.590023] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.590023] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3834d5c5-141c-4598-b451-f9a45d558878 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.614588] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.614588] env[68569]: value = "task-3167284" [ 929.614588] env[68569]: _type = "Task" [ 929.614588] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.619821] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa921989-1799-4724-9706-15af0f7c4ec0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.626039] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167284, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.632459] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2311d5b2-9d72-4bcd-a95b-3380477b95b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.672427] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc818a5-6b34-4362-81ed-a82345f2ada1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.683350] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66216a03-8b51-4a58-bf3d-ccfa2c0ca729 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.699759] env[68569]: DEBUG nova.compute.provider_tree [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.762834] env[68569]: INFO nova.compute.manager [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Rescuing [ 929.763167] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} 
[ 929.763369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.763553] env[68569]: DEBUG nova.network.neutron [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.854208] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167281, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.955389] env[68569]: DEBUG nova.compute.manager [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Received event network-changed-9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 929.955581] env[68569]: DEBUG nova.compute.manager [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Refreshing instance network info cache due to event network-changed-9ff21d1d-5195-4832-bc11-4f0170d636b7. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 929.955796] env[68569]: DEBUG oslo_concurrency.lockutils [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] Acquiring lock "refresh_cache-2cf8803a-8078-4832-a736-330d6bcde6ce" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.955937] env[68569]: DEBUG oslo_concurrency.lockutils [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] Acquired lock "refresh_cache-2cf8803a-8078-4832-a736-330d6bcde6ce" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.957290] env[68569]: DEBUG nova.network.neutron [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Refreshing network info cache for port 9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.060135] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.130292] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167284, 'name': CreateVM_Task, 'duration_secs': 0.510099} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.130628] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.132695] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.133688] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.133688] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 930.133992] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7812285b-0bda-414a-b358-97f68a20e0a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.140169] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 930.140169] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b53137-fe94-10c7-9e66-99b6c00d65f0" [ 930.140169] env[68569]: _type = "Task" [ 930.140169] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.150196] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b53137-fe94-10c7-9e66-99b6c00d65f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.203203] env[68569]: DEBUG nova.scheduler.client.report [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.353180] env[68569]: DEBUG oslo_vmware.api [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167281, 'name': PowerOnVM_Task, 'duration_secs': 0.829663} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.353635] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.353908] env[68569]: INFO nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 930.354201] env[68569]: DEBUG nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.355614] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0b2cba-87d6-4145-9313-4c3ae89e8c08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.416200] env[68569]: INFO nova.compute.manager [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Rebuilding instance [ 930.456025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.456025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.495195] env[68569]: DEBUG nova.compute.manager [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.496130] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f777bcc-91ba-4456-82f5-05c1c3138c2f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.523845] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.524287] env[68569]: DEBUG nova.virt.hardware [None 
req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.524429] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.524613] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.524785] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.524925] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.525243] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.525456] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.525661] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.526106] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.526392] env[68569]: DEBUG nova.virt.hardware [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.527386] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03efb50-25ad-4aa4-935a-ae7946d02b7e {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.541464] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1e76a9-648e-4dc6-b846-b7a4e0b7e9a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.562026] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:31:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3df34aa7-452f-49b6-9a1a-0e902948e0c6', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.569172] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.575321] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.578460] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fdcc270-c6c6-4462-babc-d198f3738a70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.604306] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.604306] env[68569]: value = "task-3167285" [ 930.604306] env[68569]: _type = "Task" [ 930.604306] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.615913] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167285, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.617371] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.655359] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b53137-fe94-10c7-9e66-99b6c00d65f0, 'name': SearchDatastore_Task, 'duration_secs': 0.012583} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.656604] env[68569]: DEBUG nova.network.neutron [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updating instance_info_cache with network_info: [{"id": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "address": "fa:16:3e:41:a8:0e", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd99f9970-89", "ovs_interfaceid": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.657885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.661150] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.661150] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.661150] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.661150] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 
tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.661150] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61502267-c8b0-4130-8a4e-96173d90da4a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.674734] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.674824] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.678745] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c834388b-0e76-40e1-9357-dcc882a353ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.688707] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 930.688707] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2f90f-c198-d780-a12a-51ba086bf1d6" [ 930.688707] env[68569]: _type = "Task" [ 930.688707] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.700122] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2f90f-c198-d780-a12a-51ba086bf1d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.709243] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.075s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.712219] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.890s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.714141] env[68569]: DEBUG nova.objects.instance [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 930.741532] env[68569]: INFO nova.scheduler.client.report [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Deleted allocations for instance 3551627b-9c90-43ea-bae7-d186eaa53c6b [ 930.879103] env[68569]: INFO nova.compute.manager [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Took 45.48 seconds to build instance. [ 930.936686] env[68569]: DEBUG nova.network.neutron [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Updated VIF entry in instance network info cache for port 9ff21d1d-5195-4832-bc11-4f0170d636b7. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.938260] env[68569]: DEBUG nova.network.neutron [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Updating instance_info_cache with network_info: [{"id": "9ff21d1d-5195-4832-bc11-4f0170d636b7", "address": "fa:16:3e:e9:2f:38", "network": {"id": "5b49b36a-77a1-42bf-9c77-f266776a0158", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1981144562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f81f435f5b644ab9c0b6bc809a0a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff21d1d-51", "ovs_interfaceid": "9ff21d1d-5195-4832-bc11-4f0170d636b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.963146] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 931.115711] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167285, 'name': CreateVM_Task, 'duration_secs': 0.442335} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.115947] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.116966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.117227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.117817] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 931.117891] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72ea4e4f-cf4b-4c30-a0f6-ea52b1c7f688 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.124368] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 931.124368] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528e1e01-102c-3859-4324-883d66d0b1b4" [ 931.124368] env[68569]: _type = "Task" [ 931.124368] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.138049] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528e1e01-102c-3859-4324-883d66d0b1b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.160306] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.203172] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a2f90f-c198-d780-a12a-51ba086bf1d6, 'name': SearchDatastore_Task, 'duration_secs': 0.015341} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.203172] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-306119aa-107f-43f3-82d7-eaac01019180 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.207827] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 931.207827] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d91b0e-ebf7-6118-85c0-cb6c1729f4ef" [ 931.207827] env[68569]: _type = "Task" [ 931.207827] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.221391] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d91b0e-ebf7-6118-85c0-cb6c1729f4ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.254985] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1d9b350b-ec8c-4a9f-b27a-413970f5d42c tempest-ImagesOneServerNegativeTestJSON-1055789778 tempest-ImagesOneServerNegativeTestJSON-1055789778-project-member] Lock "3551627b-9c90-43ea-bae7-d186eaa53c6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.929s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.382125] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3ae1c5ca-f2cc-42ca-87ad-45a0b8d5b2e9 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.128s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.440943] env[68569]: DEBUG oslo_concurrency.lockutils [req-ddca26f7-eee5-4107-8078-5ded6d842169 req-735d42b6-edce-4909-b63a-f72a29bdf6af service nova] Releasing lock "refresh_cache-2cf8803a-8078-4832-a736-330d6bcde6ce" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.490361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.514915] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.514915] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a8917bb-3870-45c1-88ca-1798020d263c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.523081] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 931.523081] env[68569]: value = "task-3167286" [ 931.523081] env[68569]: _type = "Task" [ 931.523081] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.533978] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167286, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.636841] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528e1e01-102c-3859-4324-883d66d0b1b4, 'name': SearchDatastore_Task, 'duration_secs': 0.013698} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.637209] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.637459] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.637693] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.727863] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1e3528e0-d8e6-463d-ad4c-ccf21b3745d8 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.727863] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d91b0e-ebf7-6118-85c0-cb6c1729f4ef, 'name': SearchDatastore_Task, 'duration_secs': 0.013322} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.727863] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.110s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.729579] env[68569]: INFO nova.compute.claims [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.736273] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.736659] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 2cf8803a-8078-4832-a736-330d6bcde6ce/2cf8803a-8078-4832-a736-330d6bcde6ce.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.737413] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.737619] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.737850] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20204045-f547-473f-838d-d9372d564ecd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.741471] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f362393b-56c3-46da-8973-2533145a6fcc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.759088] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 931.759088] env[68569]: value = "task-3167287" [ 931.759088] env[68569]: _type = "Task" [ 931.759088] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.759088] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.759088] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.759404] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2268afba-b1e5-4029-89fb-ccbe7df982a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.772465] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 931.772465] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527ef164-44b8-d3c2-3d2a-f8d1db1e1e8b" [ 931.772465] env[68569]: _type = "Task" [ 931.772465] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.776791] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.788068] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527ef164-44b8-d3c2-3d2a-f8d1db1e1e8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.990857] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "e9994248-0240-412b-9e60-a04b00e5c0cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.991213] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.038314] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167286, 'name': PowerOffVM_Task, 'duration_secs': 0.373118} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.038314] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.038314] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.038594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96573eae-f2b4-47db-842f-c8cdf8b3011f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.050485] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.050844] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-676d2c38-dcdb-4c2d-a93f-b03001c5f999 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.144134] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.144489] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 
tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.144983] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleting the datastore file [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.144983] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95ff6831-bcd9-463e-b20d-4260779d4ec3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.156137] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 932.156137] env[68569]: value = "task-3167289" [ 932.156137] env[68569]: _type = "Task" [ 932.156137] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.173137] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.272712] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167287, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.289876] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527ef164-44b8-d3c2-3d2a-f8d1db1e1e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.014596} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.290678] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec06c45e-cbf2-433f-a111-4ee71b637780 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.299436] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 932.299436] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1159c-d4bd-2471-6206-20f4a6641b69" [ 932.299436] env[68569]: _type = "Task" [ 932.299436] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.311602] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1159c-d4bd-2471-6206-20f4a6641b69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.494327] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 932.672961] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267294} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.673302] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.673895] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.674140] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.698980] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.699312] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-867d151b-1229-420e-bee8-8af68716abce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.708415] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 932.708415] env[68569]: value = "task-3167290" [ 932.708415] env[68569]: _type = "Task" [ 932.708415] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.721769] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167290, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.773741] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167287, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600548} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.773933] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 2cf8803a-8078-4832-a736-330d6bcde6ce/2cf8803a-8078-4832-a736-330d6bcde6ce.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.774279] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.774516] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92c5e345-d1d4-4604-b666-7f03b8bd8822 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.785749] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 932.785749] env[68569]: value = "task-3167291" [ 932.785749] env[68569]: _type = "Task" [ 932.785749] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.800592] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167291, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.811608] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1159c-d4bd-2471-6206-20f4a6641b69, 'name': SearchDatastore_Task, 'duration_secs': 0.037165} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.814527] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.814802] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.816031] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78ab117b-24b5-4366-a552-f82b47798615 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.823978] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 932.823978] env[68569]: value = "task-3167292" [ 932.823978] env[68569]: _type = "Task" [ 932.823978] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.837839] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.020113] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.225803] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167290, 'name': PowerOffVM_Task, 'duration_secs': 0.257304} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.226295] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.227039] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0bd4da-01f7-474b-a1a5-073153d80127 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.260143] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d00303-3d76-45cd-a20c-e0757ed093b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.306049] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.381009} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.308614] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 933.310112] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 933.310450] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60dd44af-2467-48a9-b1a1-bacae8513260 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.313601] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1335d984-153c-401b-b8e8-220f51f2ef0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.317297] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9945bf3f-139d-4f3a-b46f-0a4f3bf98043 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.348806] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 2cf8803a-8078-4832-a736-330d6bcde6ce/2cf8803a-8078-4832-a736-330d6bcde6ce.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.355921] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d5542d9-5525-49d0-94ae-326a35bb7a25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.373022] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 933.373022] env[68569]: value = "task-3167293" [ 933.373022] env[68569]: _type = "Task" [ 933.373022] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.373476] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf1ae05-bcd7-4060-85c1-bd06146b1adb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.388219] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167292, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.421909] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 933.421909] env[68569]: value = "task-3167294" [ 933.421909] env[68569]: _type = "Task" [ 933.421909] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.423835] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8354b8d2-c7b5-449e-8296-eabb9eb6ab87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.427587] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 933.427810] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.428074] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.428234] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.428410] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.433909] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0a2e08f-5b0d-4a74-806b-362e2f8519ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.446290] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f915595c-da76-4b84-8977-38bd9b536b8c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.451200] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167294, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.452713] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.452876] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.454088] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-067d1d9f-b4ba-4147-9ece-8f168c883874 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.465693] env[68569]: DEBUG nova.compute.provider_tree [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.468891] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 933.468891] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fcbc6c-ca20-8b77-3d40-0dca8f1296f1" [ 933.468891] env[68569]: _type = "Task" [ 933.468891] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.478941] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fcbc6c-ca20-8b77-3d40-0dca8f1296f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.715479] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 933.715768] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.717757] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 933.717757] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.718469] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 933.718671] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 933.718882] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 933.719046] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 933.719243] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 933.719410] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 933.719577] env[68569]: DEBUG nova.virt.hardware [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 933.720715] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184b8a89-3a2a-4e35-96bd-3b510c2d1672 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.731425] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25423448-b644-4fcb-af36-8550050c498e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.749525] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:8c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d1e0d6d-0c37-425c-a138-7bc79f96a3a2', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.756285] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.758143] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.758143] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f4f67f6-4680-4aab-9e99-6a219d08c069 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.782519] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.782519] env[68569]: value = "task-3167295" [ 933.782519] env[68569]: _type = "Task" [ 933.782519] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.795336] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167295, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.852024] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80353} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.852024] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.852024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.852024] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0582c213-3c54-47da-9f1b-694485b1dfdb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.861404] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 933.861404] env[68569]: value = "task-3167296" [ 933.861404] env[68569]: _type = "Task" [ 933.861404] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.872428] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.934325] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167294, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.974335] env[68569]: DEBUG nova.scheduler.client.report [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.995718] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fcbc6c-ca20-8b77-3d40-0dca8f1296f1, 'name': SearchDatastore_Task, 'duration_secs': 0.027922} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.997422] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2240c7b1-2a3a-4d1d-84d9-e959cb219777 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.004511] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 934.004511] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529e9b33-9379-3e54-133b-bf1e7e7d3d3e" [ 934.004511] env[68569]: _type = "Task" [ 934.004511] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.013880] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529e9b33-9379-3e54-133b-bf1e7e7d3d3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.294838] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167295, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.372328] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133309} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.372731] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.373307] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9111b3a-ba83-49aa-bb78-9887880ec7f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.397531] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.397772] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-876f1943-b2db-4a63-9886-8602e1bebdd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.419263] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 934.419263] env[68569]: value = "task-3167297" [ 934.419263] env[68569]: _type = "Task" [ 934.419263] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.430693] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167297, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.436322] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167294, 'name': ReconfigVM_Task, 'duration_secs': 0.762022} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.436607] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 2cf8803a-8078-4832-a736-330d6bcde6ce/2cf8803a-8078-4832-a736-330d6bcde6ce.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.437362] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b47218e-3d2d-427a-852d-48a4be81e073 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.444964] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 934.444964] env[68569]: value = "task-3167298" [ 934.444964] env[68569]: _type = "Task" [ 934.444964] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.454575] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167298, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.482994] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.483632] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 934.486594] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.996s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.488261] env[68569]: INFO nova.compute.claims [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.517717] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529e9b33-9379-3e54-133b-bf1e7e7d3d3e, 'name': SearchDatastore_Task, 'duration_secs': 0.025432} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.518016] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.518302] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. {{(pid=68569) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 934.518602] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7356c33f-fc03-4e0d-b868-31618afe84c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.528778] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 934.528778] env[68569]: value = "task-3167299" [ 934.528778] env[68569]: _type = "Task" [ 934.528778] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.542023] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167299, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.796474] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167295, 'name': CreateVM_Task, 'duration_secs': 0.587149} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.796610] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 934.797959] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.797959] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.798252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 934.798495] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fed6a0c-d31e-4d5e-b989-62b506471048 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.806461] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 934.806461] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520c44bb-f1b1-3e23-aaf2-08c2e0c3792f" [ 934.806461] env[68569]: _type = "Task" [ 934.806461] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.823530] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520c44bb-f1b1-3e23-aaf2-08c2e0c3792f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.930669] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167297, 'name': ReconfigVM_Task, 'duration_secs': 0.410297} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.930974] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Reconfigured VM instance instance-00000049 to attach disk [datastore2] c0211ed8-5995-48f4-b339-99bd4c93254c/c0211ed8-5995-48f4-b339-99bd4c93254c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.931667] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c024f50-a2a4-4b1e-9002-04edc0259227 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.943662] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 934.943662] env[68569]: value = "task-3167300" [ 934.943662] env[68569]: _type = "Task" [ 934.943662] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.961762] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167300, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.967166] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167298, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.993412] env[68569]: DEBUG nova.compute.utils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 934.999996] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 934.999996] env[68569]: DEBUG nova.network.neutron [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.044703] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167299, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.061871] env[68569]: DEBUG nova.policy [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 935.325632] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520c44bb-f1b1-3e23-aaf2-08c2e0c3792f, 'name': SearchDatastore_Task, 'duration_secs': 0.069747} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.325991] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.326723] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.326723] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.326723] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.326903] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.327198] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb610e6d-a107-4ac8-8de3-97b2c1f07e56 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.340392] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.342933] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 935.342933] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7db85de-0725-4e81-b1ce-e80efb21ca2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.348541] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 935.348541] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fec9a7-8643-0f35-d3a7-e6d523e3df14" [ 935.348541] env[68569]: _type = "Task" [ 935.348541] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.358494] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fec9a7-8643-0f35-d3a7-e6d523e3df14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.398496] env[68569]: DEBUG nova.network.neutron [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Successfully created port: c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.456219] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167300, 'name': Rename_Task, 'duration_secs': 0.2642} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.456411] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.457016] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4d28e23-d574-4b21-bd27-e31829d886e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.463432] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167298, 'name': Rename_Task, 'duration_secs': 0.858631} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.464786] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.465525] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 935.465525] env[68569]: value = "task-3167301" [ 935.465525] env[68569]: _type = "Task" [ 935.465525] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.465525] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75452d5b-d31e-47ae-89df-f8e689031611 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.475499] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167301, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.478710] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 935.478710] env[68569]: value = "task-3167302" [ 935.478710] env[68569]: _type = "Task" [ 935.478710] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.485077] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5523f-80ee-b676-38b4-4af20312cb6b/disk-0.vmdk. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 935.486017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee196796-1b0a-44e8-9dc0-4660498d8767 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.491888] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167302, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.496343] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5523f-80ee-b676-38b4-4af20312cb6b/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 935.496601] env[68569]: ERROR oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5523f-80ee-b676-38b4-4af20312cb6b/disk-0.vmdk due to incomplete transfer. [ 935.496856] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c5975168-9308-40e6-bf0b-2bcd5deef9cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.499155] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 935.525114] env[68569]: DEBUG oslo_vmware.rw_handles [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5523f-80ee-b676-38b4-4af20312cb6b/disk-0.vmdk. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 935.525374] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Uploaded image 53cd4603-1b59-4c8d-a074-19290ed071a6 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 935.527641] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 935.531048] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8e7ae11d-4a2d-4f4f-8ef3-0ab99a7e46f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.542231] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167299, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572832} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.543840] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. [ 935.543994] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 935.543994] env[68569]: value = "task-3167303" [ 935.543994] env[68569]: _type = "Task" [ 935.543994] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.544806] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267556bb-2eaf-4cb4-b31d-70e379750a19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.559849] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167303, 'name': Destroy_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.582014] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.585911] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3ff40bd-cdbc-4519-8ac7-91cfac70a223 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.607110] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 935.607110] env[68569]: value = "task-3167304" [ 935.607110] env[68569]: _type = "Task" [ 935.607110] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.618620] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167304, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.845828] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372434f3-2796-424d-af3c-cf755ef59d9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.861668] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef3e95e-375c-4d2c-b2ea-fe0b13436ad4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.864922] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fec9a7-8643-0f35-d3a7-e6d523e3df14, 'name': SearchDatastore_Task, 'duration_secs': 0.010742} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.866384] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62dc8d5-1f83-49df-b913-0f9bb4252552 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.898052] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06718c38-c0af-4f7e-a896-13c50e5910fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.900153] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 935.900153] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52071c78-e844-a7cd-b85c-05b91bc3ee7d" [ 935.900153] env[68569]: _type = "Task" [ 935.900153] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.906622] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11e04db-d310-4835-9f53-644a4edf7572 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.913625] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52071c78-e844-a7cd-b85c-05b91bc3ee7d, 'name': SearchDatastore_Task, 'duration_secs': 0.01964} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.914245] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.914543] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 935.914914] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7035b777-33de-4a50-90ee-a5f8b4cc15a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.924708] env[68569]: DEBUG nova.compute.provider_tree [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.932625] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 935.932625] env[68569]: value = "task-3167305" [ 935.932625] env[68569]: _type = "Task" [ 935.932625] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.944206] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167305, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.977180] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167301, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.989512] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167302, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.058911] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167303, 'name': Destroy_Task, 'duration_secs': 0.321904} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.059080] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Destroyed the VM [ 936.059311] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 936.059570] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e9e3d0d8-03ef-4f98-8f6f-35c7ac23c0d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.068056] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 936.068056] env[68569]: value = "task-3167306" [ 936.068056] env[68569]: _type = "Task" [ 936.068056] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.078904] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167306, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.118121] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167304, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.428015] env[68569]: DEBUG nova.scheduler.client.report [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.446148] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167305, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.478194] env[68569]: DEBUG oslo_vmware.api [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167301, 'name': PowerOnVM_Task, 'duration_secs': 0.591174} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.478574] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.478668] env[68569]: DEBUG nova.compute.manager [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.479762] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5447bcd0-3712-4eec-97e2-aefb8e37b508 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.495801] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167302, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.508571] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 936.540474] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.540745] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.540904] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.541095] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.541277] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.541438] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.541745] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.541947] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 936.542161] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.542369] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.542567] env[68569]: DEBUG nova.virt.hardware [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.543901] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d3203b-1ae3-4ecd-946e-8c593ea58a10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.554563] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d18437-e0c8-4384-975b-45d7ab5f8373 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.583200] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167306, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.622852] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167304, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.897612] env[68569]: DEBUG nova.compute.manager [req-38c6a113-2322-4b2f-b55f-4b0d50bf29bd req-843577fc-1b44-4f5a-a2b5-9b412cc69bc1 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Received event network-vif-plugged-c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 936.897858] env[68569]: DEBUG oslo_concurrency.lockutils [req-38c6a113-2322-4b2f-b55f-4b0d50bf29bd req-843577fc-1b44-4f5a-a2b5-9b412cc69bc1 service nova] Acquiring lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.898112] env[68569]: DEBUG oslo_concurrency.lockutils [req-38c6a113-2322-4b2f-b55f-4b0d50bf29bd req-843577fc-1b44-4f5a-a2b5-9b412cc69bc1 service nova] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.898453] env[68569]: DEBUG oslo_concurrency.lockutils [req-38c6a113-2322-4b2f-b55f-4b0d50bf29bd req-843577fc-1b44-4f5a-a2b5-9b412cc69bc1 service nova] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.898575] env[68569]: DEBUG nova.compute.manager [req-38c6a113-2322-4b2f-b55f-4b0d50bf29bd req-843577fc-1b44-4f5a-a2b5-9b412cc69bc1 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] No waiting events found dispatching network-vif-plugged-c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 936.898707] env[68569]: WARNING nova.compute.manager [req-38c6a113-2322-4b2f-b55f-4b0d50bf29bd req-843577fc-1b44-4f5a-a2b5-9b412cc69bc1 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Received unexpected event network-vif-plugged-c350f401-6619-4da6-83e4-c0650e2cfcb6 for instance with vm_state building and task_state spawning. [ 936.933741] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.934299] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 936.937366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.918s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.938848] env[68569]: INFO nova.compute.claims [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.950812] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.941809} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.951094] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.951308] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.951767] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-675ede73-c868-4ee2-abf8-ca26160f82fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.960354] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 936.960354] env[68569]: value = "task-3167307" [ 936.960354] env[68569]: _type = "Task" [ 936.960354] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.974200] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167307, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.997394] env[68569]: DEBUG oslo_vmware.api [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167302, 'name': PowerOnVM_Task, 'duration_secs': 1.228513} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.998344] env[68569]: DEBUG nova.network.neutron [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Successfully updated port: c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.005581] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.005829] env[68569]: INFO nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Took 11.58 seconds to spawn the instance on the hypervisor. [ 937.006017] env[68569]: DEBUG nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.007955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.008797] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a1039f-9017-4103-85f2-74754119de24 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.083046] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167306, 'name': RemoveSnapshot_Task, 'duration_secs': 0.880912} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.083046] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 937.083238] env[68569]: DEBUG nova.compute.manager [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.083879] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551a56e5-2b3f-4814-b3a4-a66dfaa1edbc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.119052] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167304, 'name': ReconfigVM_Task, 'duration_secs': 1.15612} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.119367] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.120254] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd2296f-aac3-4638-bc75-a8aa6f5061e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.148835] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd583e7f-249e-41e5-8735-e557ffcad197 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.165460] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 937.165460] env[68569]: value = "task-3167308" [ 937.165460] env[68569]: _type = "Task" [ 937.165460] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.174469] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167308, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.443685] env[68569]: DEBUG nova.compute.utils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 937.445249] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 937.445249] env[68569]: DEBUG nova.network.neutron [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 937.470870] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167307, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07548} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.471523] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.471980] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caa95fa-303e-47ac-8d06-37c3d26447ba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.497742] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.499570] env[68569]: DEBUG nova.policy [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868bbe891585423f85374f6dffdc7813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62ff17f9dcc242e0aff061402e57bdcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 937.501415] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13bd8195-a729-4625-8455-1d0f10d0a781 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.516515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-f5f8b054-7ee4-40f5-84de-1cee02949cd2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.516655] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-f5f8b054-7ee4-40f5-84de-1cee02949cd2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.516795] env[68569]: DEBUG nova.network.neutron [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.527636] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 937.527636] env[68569]: value = "task-3167309" [ 937.527636] env[68569]: _type = "Task" [ 937.527636] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.533517] env[68569]: INFO nova.compute.manager [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Took 41.22 seconds to build instance. [ 937.541146] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167309, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.598060] env[68569]: INFO nova.compute.manager [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Shelve offloading [ 937.677097] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167308, 'name': ReconfigVM_Task, 'duration_secs': 0.415186} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.677433] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.677708] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83729b2a-9958-4782-af00-d1bb33d6959b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.683112] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "c0211ed8-5995-48f4-b339-99bd4c93254c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.683378] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.683574] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "c0211ed8-5995-48f4-b339-99bd4c93254c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.683758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.683935] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.686925] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 937.686925] env[68569]: value = "task-3167310" [ 937.686925] env[68569]: _type = "Task" [ 937.686925] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.687481] env[68569]: INFO nova.compute.manager [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Terminating instance [ 937.699694] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167310, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.846387] env[68569]: DEBUG nova.network.neutron [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Successfully created port: 292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.948045] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 938.036092] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3e0e508-12ae-4f2d-999e-afb9d0504b19 tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.227s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.045590] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167309, 'name': ReconfigVM_Task, 'duration_secs': 0.333817} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.049626] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8/398dd3c7-c630-4a29-b204-80f6fb394ce8.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.051027] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2faf44eb-6cdc-4ce0-af90-0ba53d4386ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.061030] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 938.061030] env[68569]: value = "task-3167311" [ 938.061030] env[68569]: _type = "Task" [ 938.061030] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.076377] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167311, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.085692] env[68569]: DEBUG nova.network.neutron [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.104999] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.104999] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-546f9879-c92c-4f44-a7a7-b45fded9e9b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.113895] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 938.113895] env[68569]: value = "task-3167312" [ 938.113895] env[68569]: _type = "Task" [ 938.113895] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.129904] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 938.130245] env[68569]: DEBUG nova.compute.manager [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.131191] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951f040e-1b07-42e0-beac-7a0ebee3f835 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.140806] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.141048] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.141218] env[68569]: DEBUG nova.network.neutron [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.179464] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.179464] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.194847] env[68569]: DEBUG nova.compute.manager [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 938.195065] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 938.196350] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b703a83-3834-4a47-82ca-307a0d124185 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.205453] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167310, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.211585] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.212169] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42f231b1-6ea7-4428-8809-f41e228dd9c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.225602] env[68569]: DEBUG oslo_vmware.api [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 938.225602] env[68569]: value = "task-3167313" [ 938.225602] env[68569]: _type = "Task" [ 938.225602] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.238955] env[68569]: DEBUG oslo_vmware.api [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167313, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.337080] env[68569]: DEBUG nova.network.neutron [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Updating instance_info_cache with network_info: [{"id": "c350f401-6619-4da6-83e4-c0650e2cfcb6", "address": "fa:16:3e:6f:22:8a", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc350f401-66", "ovs_interfaceid": "c350f401-6619-4da6-83e4-c0650e2cfcb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.391480] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba81667-764d-4fba-b19a-b47479b14930 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.400917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c5d0f5-6137-46ba-b8fb-89cafbeeba46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.437120] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d827c42-9299-4673-9efa-fa36c5a6386b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.447284] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca8c95a-ba72-4cb8-adf0-a4563c907a71 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.467460] env[68569]: DEBUG nova.compute.provider_tree [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.570641] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167311, 'name': Rename_Task, 'duration_secs': 0.338879} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.570641] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.570824] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a6930f6-fab7-44d2-9cc4-94af04cce5af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.581892] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 938.581892] env[68569]: value = "task-3167314" [ 938.581892] env[68569]: _type = "Task" [ 938.581892] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.595528] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167314, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.681820] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 938.700798] env[68569]: DEBUG oslo_vmware.api [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167310, 'name': PowerOnVM_Task, 'duration_secs': 0.876365} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.701114] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.704368] env[68569]: DEBUG nova.compute.manager [None req-7fa5203e-a097-44c9-acaf-f5e52e189285 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.704924] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7912d24-e37f-45a4-8e3c-baaa735020b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.737418] env[68569]: DEBUG oslo_vmware.api [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167313, 'name': PowerOffVM_Task, 'duration_secs': 0.228461} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.737747] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.737926] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.738192] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65add5b1-0507-448e-b9b0-65db81d3b684 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.817052] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.817285] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.817498] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore2] c0211ed8-5995-48f4-b339-99bd4c93254c {{(pid=68569) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.817790] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df2c3326-ba7b-40e1-a11c-f36034fabe07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.826295] env[68569]: DEBUG oslo_vmware.api [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 938.826295] env[68569]: value = "task-3167316" [ 938.826295] env[68569]: _type = "Task" [ 938.826295] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.835164] env[68569]: DEBUG oslo_vmware.api [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.840786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-f5f8b054-7ee4-40f5-84de-1cee02949cd2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.841117] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Instance network_info: |[{"id": "c350f401-6619-4da6-83e4-c0650e2cfcb6", "address": "fa:16:3e:6f:22:8a", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc350f401-66", "ovs_interfaceid": "c350f401-6619-4da6-83e4-c0650e2cfcb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 938.841506] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:22:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c350f401-6619-4da6-83e4-c0650e2cfcb6', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.849596] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating folder: Project (67e07f7ab9ab41feb4d71e1d128d093d). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 938.852606] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-327748c4-93a2-45b0-99e2-c4434fa6ac75 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.867126] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created folder: Project (67e07f7ab9ab41feb4d71e1d128d093d) in parent group-v633430. [ 938.867343] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating folder: Instances. Parent ref: group-v633649. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 938.867819] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb637a21-b328-4038-ba6d-aa75457641fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.885036] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created folder: Instances in parent group-v633649. [ 938.885487] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.885617] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.885736] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbad1f68-464b-495a-8d92-c5cefc1d9810 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.903112] env[68569]: DEBUG nova.network.neutron [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updating instance_info_cache with network_info: [{"id": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "address": "fa:16:3e:58:35:5f", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c9a2dbd-a7", "ovs_interfaceid": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.912288] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.912288] env[68569]: value = "task-3167319" [ 938.912288] env[68569]: _type = "Task" [ 938.912288] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.922509] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167319, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.926236] env[68569]: DEBUG nova.compute.manager [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Received event network-changed-c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 938.926489] env[68569]: DEBUG nova.compute.manager [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Refreshing instance network info cache due to event network-changed-c350f401-6619-4da6-83e4-c0650e2cfcb6. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 938.926773] env[68569]: DEBUG oslo_concurrency.lockutils [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] Acquiring lock "refresh_cache-f5f8b054-7ee4-40f5-84de-1cee02949cd2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.927047] env[68569]: DEBUG oslo_concurrency.lockutils [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] Acquired lock "refresh_cache-f5f8b054-7ee4-40f5-84de-1cee02949cd2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.927129] env[68569]: DEBUG nova.network.neutron [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Refreshing network info cache for port c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.970353] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 938.973442] env[68569]: DEBUG nova.scheduler.client.report [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.006801] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.007197] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 939.007373] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.007559] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.007708] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.007853] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.008096] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.008279] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.008459] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.008615] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.008889] env[68569]: DEBUG nova.virt.hardware [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.010321] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603e6d4b-298d-4ace-9435-deeb218ab7e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.020073] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ea5d39-970e-448c-a450-74fc972840e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.083553] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "2cf8803a-8078-4832-a736-330d6bcde6ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.083850] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.084115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "2cf8803a-8078-4832-a736-330d6bcde6ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.084334] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.084496] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.090614] env[68569]: INFO nova.compute.manager [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Terminating instance [ 939.100161] env[68569]: DEBUG oslo_vmware.api [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167314, 'name': PowerOnVM_Task, 'duration_secs': 0.468967} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.100312] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.100406] env[68569]: DEBUG nova.compute.manager [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.101552] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8123ef6a-5642-42cb-bc40-8140250167f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.207824] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.338118] env[68569]: DEBUG oslo_vmware.api [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268156} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.338306] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.338431] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.338604] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.338809] env[68569]: INFO nova.compute.manager [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 939.339078] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.339265] env[68569]: DEBUG nova.compute.manager [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 939.339358] env[68569]: DEBUG nova.network.neutron [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.409022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.424562] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167319, 'name': CreateVM_Task, 'duration_secs': 0.444548} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.425040] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.426061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.426430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.427049] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 939.427431] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d363b8b-06bf-405d-b126-bb5d19717cf7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.435704] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 939.435704] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de7d0b-5eb4-91b8-7cf5-c6fc196ae022" [ 
939.435704] env[68569]: _type = "Task" [ 939.435704] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.444813] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de7d0b-5eb4-91b8-7cf5-c6fc196ae022, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.485019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.485019] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 939.494033] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.484s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.494033] env[68569]: DEBUG nova.objects.instance [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 939.527380] env[68569]: DEBUG nova.network.neutron [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Successfully updated port: 292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 939.600633] env[68569]: DEBUG nova.compute.manager [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.600633] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.600633] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e46d0d7-6afb-4870-bf0e-847bbc285963 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.619478] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.621261] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c81e3fb-676e-4199-b7de-ee3bcb3c845e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.623200] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.631090] env[68569]: DEBUG oslo_vmware.api [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 939.631090] env[68569]: value = "task-3167320" [ 939.631090] env[68569]: _type = "Task" [ 939.631090] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.642749] env[68569]: DEBUG oslo_vmware.api [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167320, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.713925] env[68569]: INFO nova.compute.manager [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Unrescuing [ 939.714107] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.714275] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquired lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.714555] env[68569]: DEBUG nova.network.neutron [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.949611] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de7d0b-5eb4-91b8-7cf5-c6fc196ae022, 'name': SearchDatastore_Task, 'duration_secs': 0.0105} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.949960] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.950352] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.950456] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.951448] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.951448] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.951448] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-449800fb-6b67-4da3-9697-e1a7b9319e65 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.961688] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.961981] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.962869] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b4b52d5-c872-40a4-b3aa-dacc16a79ff5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.971233] env[68569]: DEBUG nova.network.neutron [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Updated VIF entry in instance network info cache for port c350f401-6619-4da6-83e4-c0650e2cfcb6. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.971575] env[68569]: DEBUG nova.network.neutron [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Updating instance_info_cache with network_info: [{"id": "c350f401-6619-4da6-83e4-c0650e2cfcb6", "address": "fa:16:3e:6f:22:8a", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc350f401-66", "ovs_interfaceid": "c350f401-6619-4da6-83e4-c0650e2cfcb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.976754] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 939.976754] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d4504a-2f53-308c-15e6-e4d6b997dc69" [ 939.976754] env[68569]: _type = "Task" [ 939.976754] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.986632] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d4504a-2f53-308c-15e6-e4d6b997dc69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.988964] env[68569]: DEBUG nova.compute.utils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 939.993817] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 939.993817] env[68569]: DEBUG nova.network.neutron [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 940.010085] env[68569]: DEBUG nova.compute.manager [req-1e85703b-da71-4a53-8b98-af2e00205f43 req-e18d3be8-a1e9-461d-a4d9-9f911a243449 service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Received event network-vif-deleted-3df34aa7-452f-49b6-9a1a-0e902948e0c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 940.010416] env[68569]: INFO nova.compute.manager [req-1e85703b-da71-4a53-8b98-af2e00205f43 req-e18d3be8-a1e9-461d-a4d9-9f911a243449 service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Neutron deleted interface 3df34aa7-452f-49b6-9a1a-0e902948e0c6; detaching it from the instance and deleting it from the info cache [ 940.011303] env[68569]: DEBUG nova.network.neutron [req-1e85703b-da71-4a53-8b98-af2e00205f43 req-e18d3be8-a1e9-461d-a4d9-9f911a243449 service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.030808] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "refresh_cache-6b2120d3-2e4b-4d1b-8109-6513b3b320eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.030972] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "refresh_cache-6b2120d3-2e4b-4d1b-8109-6513b3b320eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.031170] env[68569]: DEBUG nova.network.neutron [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.069406] env[68569]: DEBUG nova.policy [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'1f83489e23864ab286b0d29f9f89d7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b924e47d91a488e9d912a5369792aa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 940.088652] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.089617] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0527dd-85e5-4406-8127-62dd016c540a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.098145] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.099057] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-537ece29-4337-4059-9c8f-f216c931a753 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.143143] env[68569]: DEBUG oslo_vmware.api [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167320, 'name': PowerOffVM_Task, 'duration_secs': 0.513206} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.143518] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.143702] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.144315] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3c9b29c-165e-4ece-864d-4b38624fa9a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.181352] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.181716] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.182026] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleting the datastore file [datastore2] b91304c7-e74d-402b-865b-150e0057c895 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.182543] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fab4a097-a6b8-49d4-9363-00a364529501 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.193033] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 940.193033] env[68569]: value = "task-3167323" [ 940.193033] env[68569]: _type = "Task" [ 940.193033] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.204171] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167323, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.228824] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.230906] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.233026] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Deleting the datastore file [datastore2] 2cf8803a-8078-4832-a736-330d6bcde6ce {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.233026] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c40f1d54-a845-4572-9483-c2e1da2edba2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.241012] env[68569]: DEBUG oslo_vmware.api [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for the task: (returnval){ [ 940.241012] env[68569]: value = "task-3167324" [ 940.241012] env[68569]: _type = "Task" [ 940.241012] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.251844] env[68569]: DEBUG oslo_vmware.api [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167324, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.476988] env[68569]: DEBUG oslo_concurrency.lockutils [req-4f7754d8-584e-4c95-82c6-74f1eb08f6a4 req-a7c492cd-bdd6-4b23-bfd9-69dc28b3eff0 service nova] Releasing lock "refresh_cache-f5f8b054-7ee4-40f5-84de-1cee02949cd2" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.487779] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d4504a-2f53-308c-15e6-e4d6b997dc69, 'name': SearchDatastore_Task, 'duration_secs': 0.014723} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.488107] env[68569]: DEBUG nova.network.neutron [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.492410] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8424b868-73eb-4902-b93a-a00575eae450 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.494745] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 940.498982] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 940.498982] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52053de4-8ff0-1cd9-1a22-98326d9be09f" [ 940.498982] env[68569]: _type = "Task" [ 940.498982] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.507408] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1250573-b4cf-452f-8d88-55ca7f481fb4 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.511260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.301s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.511260] env[68569]: INFO nova.compute.claims [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.515058] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-351ecc76-0149-4279-a41c-0abf1769594c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.529922] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52053de4-8ff0-1cd9-1a22-98326d9be09f, 'name': SearchDatastore_Task, 'duration_secs': 0.030666} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.531221] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.531521] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] f5f8b054-7ee4-40f5-84de-1cee02949cd2/f5f8b054-7ee4-40f5-84de-1cee02949cd2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 940.532156] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93020e61-7d4a-42d9-9f1e-5c5503629da8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.536750] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c49810b-b2af-435f-80f5-f624a7370e90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.560636] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 940.560636] env[68569]: value = "task-3167325" [ 940.560636] env[68569]: _type = "Task" [ 940.560636] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.570384] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.585762] env[68569]: DEBUG nova.compute.manager [req-1e85703b-da71-4a53-8b98-af2e00205f43 req-e18d3be8-a1e9-461d-a4d9-9f911a243449 service nova] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Detach interface failed, port_id=3df34aa7-452f-49b6-9a1a-0e902948e0c6, reason: Instance c0211ed8-5995-48f4-b339-99bd4c93254c could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 940.610820] env[68569]: DEBUG nova.network.neutron [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Successfully created port: 40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.615285] env[68569]: DEBUG nova.network.neutron [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updating instance_info_cache with network_info: [{"id": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "address": "fa:16:3e:41:a8:0e", "network": {"id": "f10bb94d-d811-400a-ada9-30d7720c1ddc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2037478415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "03a77ea99dbb445396a7ac5888135321", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd99f9970-89", "ovs_interfaceid": "d99f9970-89b3-4cf5-ae87-3e05a8b03ffa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.630315] env[68569]: DEBUG nova.network.neutron [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.654484] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.654838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.655101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.655336] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.655537] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.659891] env[68569]: INFO nova.compute.manager [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Terminating instance [ 940.702624] env[68569]: DEBUG oslo_vmware.api [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169252} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.702901] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.703187] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.703407] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.728885] env[68569]: INFO nova.scheduler.client.report [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted allocations for instance b91304c7-e74d-402b-865b-150e0057c895 [ 940.752926] env[68569]: DEBUG oslo_vmware.api [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Task: {'id': task-3167324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165783} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.753295] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.753533] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.753772] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.754014] env[68569]: INFO nova.compute.manager [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 940.754332] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.754715] env[68569]: DEBUG nova.compute.manager [-] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.754715] env[68569]: DEBUG nova.network.neutron [-] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.992958] env[68569]: INFO nova.compute.manager [-] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Took 1.65 seconds to deallocate network for instance. [ 941.071816] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167325, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.102690] env[68569]: DEBUG nova.compute.manager [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Received event network-vif-plugged-292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 941.102918] env[68569]: DEBUG oslo_concurrency.lockutils [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] Acquiring lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.103137] env[68569]: DEBUG oslo_concurrency.lockutils [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.104427] env[68569]: DEBUG oslo_concurrency.lockutils [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.104427] env[68569]: DEBUG nova.compute.manager [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] No waiting events found dispatching network-vif-plugged-292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.104427] env[68569]: WARNING nova.compute.manager [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 
6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Received unexpected event network-vif-plugged-292385c9-acfb-49af-88d2-6fb5794c6268 for instance with vm_state building and task_state spawning. [ 941.104427] env[68569]: DEBUG nova.compute.manager [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Received event network-changed-292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 941.104427] env[68569]: DEBUG nova.compute.manager [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Refreshing instance network info cache due to event network-changed-292385c9-acfb-49af-88d2-6fb5794c6268. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 941.104943] env[68569]: DEBUG oslo_concurrency.lockutils [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] Acquiring lock "refresh_cache-6b2120d3-2e4b-4d1b-8109-6513b3b320eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.118441] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Releasing lock "refresh_cache-7696390d-a097-4b6d-827d-92f3165a4188" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.119096] env[68569]: DEBUG nova.objects.instance [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lazy-loading 'flavor' on Instance uuid 7696390d-a097-4b6d-827d-92f3165a4188 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.163621] env[68569]: DEBUG nova.compute.manager [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 941.163845] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.164927] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4b4d44-e046-477b-9733-2347833bacc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.176550] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.176809] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcdcacfe-3086-47ed-914f-8b59c0c36208 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.184900] env[68569]: DEBUG oslo_vmware.api [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 941.184900] env[68569]: value = "task-3167326" [ 941.184900] env[68569]: _type = "Task" [ 941.184900] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.195863] env[68569]: DEBUG oslo_vmware.api [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167326, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.233911] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.304206] env[68569]: DEBUG nova.network.neutron [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Updating instance_info_cache with network_info: [{"id": "292385c9-acfb-49af-88d2-6fb5794c6268", "address": "fa:16:3e:c3:23:2a", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292385c9-ac", "ovs_interfaceid": "292385c9-acfb-49af-88d2-6fb5794c6268", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.505130] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.506229] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 941.534033] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 941.534314] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.534522] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 941.534708] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.534855] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 941.535033] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 941.535244] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 941.535416] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 941.535625] env[68569]: DEBUG 
nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 941.535812] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 941.536009] env[68569]: DEBUG nova.virt.hardware [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 941.536921] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2c83e5-8944-4120-8f25-b53046a216af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.548837] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ea6ddb-29f8-40d4-a525-71a43e29d8f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.576824] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723452} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.577138] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] f5f8b054-7ee4-40f5-84de-1cee02949cd2/f5f8b054-7ee4-40f5-84de-1cee02949cd2.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 941.577376] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 941.577646] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b469ebf5-027b-42c3-adfc-5f40526e60aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.584420] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 941.584420] env[68569]: value = "task-3167327" [ 941.584420] env[68569]: _type = "Task" [ 941.584420] env[68569]: } to complete. 
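The topology negotiation above (flavor m1.nano, 1 vCPU, no flavor or image limits) reduces to the single topology of 1 socket x 1 core x 1 thread. The toy enumeration below is not Nova's nova.virt.hardware implementation, just an illustration of why one vCPU under the logged 65536-wide limits yields exactly that one result.

    # Toy illustration (not nova.virt.hardware): enumerate sockets*cores*threads
    # factorizations of the vCPU count within the logged maxima of 65536.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    # For the 1-vCPU m1.nano flavor in the log this prints [(1, 1, 1)],
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))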
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.592723] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167327, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.624218] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca65054-7619-4c90-b3de-84d8f23f5f38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.652218] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.654465] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3f44243-4021-4db3-b796-3628716be454 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.662988] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 941.662988] env[68569]: value = "task-3167328" [ 941.662988] env[68569]: _type = "Task" [ 941.662988] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.673729] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167328, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.698615] env[68569]: DEBUG oslo_vmware.api [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167326, 'name': PowerOffVM_Task, 'duration_secs': 0.293631} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.701507] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.701687] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.702150] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57ddad91-06e8-4314-883f-7e72c0841ae2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.776229] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.776579] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.776699] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleting the datastore file [datastore2] 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.779922] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19636314-6490-46fb-bf62-053ab70b53c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.788554] env[68569]: DEBUG oslo_vmware.api [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 941.788554] env[68569]: value = "task-3167330" [ 941.788554] env[68569]: _type = "Task" [ 941.788554] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.802661] env[68569]: DEBUG oslo_vmware.api [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167330, 'name': DeleteDatastoreFile_Task} progress is 0%. 
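The power-off / unregister / delete-datastore-file sequence above is driven through vCenter tasks, and each "Waiting for the task ... to complete" / "progress is N%" pair is oslo.vmware polling TaskInfo until the task finishes. A sketch of that invoke-then-wait pattern follows; the connection parameters are placeholders and the keyword names (api_retry_count, task_poll_interval) are as I recall oslo.vmware's VMwareAPISession, so treat them as assumptions rather than Nova's exact call.

    # Hedged sketch of the oslo.vmware invoke-task-then-wait pattern from the log.
    # Host, credentials and the looked-up VM are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test',                 # vCenter host (placeholder)
        'administrator@vsphere.local',     # username (placeholder)
        'secret',                          # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)

    # Grab any VirtualMachine moref just to have something to act on.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    if result.objects:
        vm_ref = result.objects[0].obj
        # Equivalent of the PowerOffVM_Task invocation logged above; wait_for_task()
        # polls the task and is what emits the "progress is N%" debug lines.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)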
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.809025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "refresh_cache-6b2120d3-2e4b-4d1b-8109-6513b3b320eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.809405] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Instance network_info: |[{"id": "292385c9-acfb-49af-88d2-6fb5794c6268", "address": "fa:16:3e:c3:23:2a", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292385c9-ac", "ovs_interfaceid": "292385c9-acfb-49af-88d2-6fb5794c6268", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 941.810167] env[68569]: DEBUG oslo_concurrency.lockutils [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] Acquired lock "refresh_cache-6b2120d3-2e4b-4d1b-8109-6513b3b320eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.810282] env[68569]: DEBUG nova.network.neutron [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Refreshing network info cache for port 292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.811660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:23:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '292385c9-acfb-49af-88d2-6fb5794c6268', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.820941] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating folder: Project (62ff17f9dcc242e0aff061402e57bdcd). 
Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 941.822204] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb872cfd-3fc4-44a5-94a3-bf3301a6ba85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.840416] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created folder: Project (62ff17f9dcc242e0aff061402e57bdcd) in parent group-v633430. [ 941.840685] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating folder: Instances. Parent ref: group-v633652. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 941.842100] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b898c2f9-1b12-459b-9534-b423cde35975 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.844838] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2b9a35-c8a0-42a9-a09f-fe06237e2b55 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.854703] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813ce707-7c4a-4763-bf11-f71468d2c6b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.860024] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created folder: Instances in parent group-v633652. [ 941.860338] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.860957] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.861230] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-993a0161-37e8-4ab6-8184-d0af8e403f45 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.903597] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa173e8-8a6d-440e-a5a3-8d56d80767d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.909625] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.909625] env[68569]: value = "task-3167333" [ 941.909625] env[68569]: _type = "Task" [ 941.909625] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.915917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639447b1-62e4-4279-8d2b-4d5c1ebb376b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.937327] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167333, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.938396] env[68569]: DEBUG nova.compute.provider_tree [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.946746] env[68569]: DEBUG nova.network.neutron [-] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.095237] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071364} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.095534] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.096316] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f2c075-e1ce-4e81-809e-88db553b7207 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.119186] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] f5f8b054-7ee4-40f5-84de-1cee02949cd2/f5f8b054-7ee4-40f5-84de-1cee02949cd2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.120606] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57aa560c-7dd4-4d48-a8a2-f23f880062b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.138160] env[68569]: DEBUG nova.compute.manager [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Received event network-vif-unplugged-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 942.138160] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] Acquiring lock 
"b91304c7-e74d-402b-865b-150e0057c895-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.138160] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] Lock "b91304c7-e74d-402b-865b-150e0057c895-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.138160] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] Lock "b91304c7-e74d-402b-865b-150e0057c895-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.138160] env[68569]: DEBUG nova.compute.manager [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] No waiting events found dispatching network-vif-unplugged-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 942.138405] env[68569]: DEBUG nova.compute.manager [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Received event network-vif-unplugged-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f for instance with task_state deleting. {{(pid=68569) _process_instance_event /opt/stack/nova/nova/compute/manager.py:11522}} [ 942.138405] env[68569]: DEBUG nova.compute.manager [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Received event network-changed-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 942.138405] env[68569]: DEBUG nova.compute.manager [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Refreshing instance network info cache due to event network-changed-5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 942.138405] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] Acquiring lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.138405] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] Acquired lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.138628] env[68569]: DEBUG nova.network.neutron [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Refreshing network info cache for port 5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.146651] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 942.146651] env[68569]: value = "task-3167334" [ 942.146651] env[68569]: _type = "Task" [ 942.146651] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.164081] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167334, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.173712] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167328, 'name': PowerOffVM_Task, 'duration_secs': 0.245917} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.174148] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.180126] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Reconfiguring VM instance instance-0000004a to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 942.180444] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65888345-c29b-4995-a5c9-c24c53e865f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.210292] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 942.210292] env[68569]: value = "task-3167335" [ 942.210292] env[68569]: _type = "Task" [ 942.210292] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.220996] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167335, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.300041] env[68569]: DEBUG oslo_vmware.api [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167330, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224597} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.300456] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.300779] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.301079] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.301397] env[68569]: INFO nova.compute.manager [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 942.301798] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.302081] env[68569]: DEBUG nova.compute.manager [-] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 942.302206] env[68569]: DEBUG nova.network.neutron [-] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.423121] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167333, 'name': CreateVM_Task, 'duration_secs': 0.40496} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.423296] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.424635] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.424777] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.425184] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.425602] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec13ec78-4b8e-4106-8280-6f5d5ccc67dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.433593] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 942.433593] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cf8db1-991e-6e8d-286b-f7e9133f4e77" [ 942.433593] env[68569]: _type = "Task" [ 942.433593] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.445190] env[68569]: DEBUG nova.scheduler.client.report [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.448614] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cf8db1-991e-6e8d-286b-f7e9133f4e77, 'name': SearchDatastore_Task} progress is 0%. 
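The image-cache lock names above are stringified datastore paths of the form "[datastore] relative/path". oslo.vmware ships a small helper for building and parsing them; the sketch below uses it with the image id from the log, and the import path is given as I recall it, so treat it as an assumption.

    # Hedged sketch: composing the "[datastore2] devstack-image-cache_base/<image>"
    # style paths used as lock names above (oslo.vmware helper, from memory).
    from oslo_vmware.objects.datastore import DatastorePath

    image_id = 'cfcf6154-fe87-45d3-9aaf-2d3604c95629'   # image id from the log
    cached_vmdk = DatastorePath('datastore2', 'devstack-image-cache_base',
                                image_id, image_id + '.vmdk')
    print(str(cached_vmdk))   # prints the bracketed path used in the lock names above

    # Parsing goes the other way:
    parsed = DatastorePath.parse(str(cached_vmdk))
    print(parsed.datastore, parsed.rel_path)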
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.452881] env[68569]: INFO nova.compute.manager [-] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Took 1.70 seconds to deallocate network for instance. [ 942.487879] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "b91304c7-e74d-402b-865b-150e0057c895" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.658361] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.720781] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167335, 'name': ReconfigVM_Task, 'duration_secs': 0.32226} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.724715] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Reconfigured VM instance instance-0000004a to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 942.724914] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.725402] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecd6f5a6-f6bd-4ad4-aeca-36c03dd3f601 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.730747] env[68569]: DEBUG nova.network.neutron [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Updated VIF entry in instance network info cache for port 292385c9-acfb-49af-88d2-6fb5794c6268. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.730926] env[68569]: DEBUG nova.network.neutron [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Updating instance_info_cache with network_info: [{"id": "292385c9-acfb-49af-88d2-6fb5794c6268", "address": "fa:16:3e:c3:23:2a", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292385c9-ac", "ovs_interfaceid": "292385c9-acfb-49af-88d2-6fb5794c6268", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.733166] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 942.733166] env[68569]: value = "task-3167336" [ 942.733166] env[68569]: _type = "Task" [ 942.733166] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.746521] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167336, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.868543] env[68569]: DEBUG nova.network.neutron [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updated VIF entry in instance network info cache for port 5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.868929] env[68569]: DEBUG nova.network.neutron [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updating instance_info_cache with network_info: [{"id": "5c9a2dbd-a7a3-4f38-93ec-b9cb0ce8e20f", "address": "fa:16:3e:58:35:5f", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": null, "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5c9a2dbd-a7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.918598] env[68569]: DEBUG nova.network.neutron [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Successfully updated port: 40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.944749] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52cf8db1-991e-6e8d-286b-f7e9133f4e77, 'name': SearchDatastore_Task, 'duration_secs': 0.012503} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.945055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.945298] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.945529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.945692] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.945889] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.946166] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44afdb1d-7823-41dd-87f1-298b267ac64a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.952289] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.952782] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 942.955359] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.332s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.955505] env[68569]: DEBUG nova.objects.instance [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 942.958635] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.960385] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.960605] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.961597] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc4bf135-1ef6-4dd7-9f0a-07f368c9c576 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.969643] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 942.969643] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524b7019-f216-75c2-5d9b-6e6f448ef79d" [ 942.969643] env[68569]: _type = "Task" [ 942.969643] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.978916] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524b7019-f216-75c2-5d9b-6e6f448ef79d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.158054] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167334, 'name': ReconfigVM_Task, 'duration_secs': 0.596133} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.158209] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Reconfigured VM instance instance-0000004d to attach disk [datastore2] f5f8b054-7ee4-40f5-84de-1cee02949cd2/f5f8b054-7ee4-40f5-84de-1cee02949cd2.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.158786] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9d48b5c-4ce7-4807-b867-6368310395c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.167589] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 943.167589] env[68569]: value = "task-3167337" [ 943.167589] env[68569]: _type = "Task" [ 943.167589] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.175276] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167337, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.225806] env[68569]: DEBUG nova.network.neutron [-] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.240485] env[68569]: DEBUG oslo_concurrency.lockutils [req-28766666-14cf-41d4-9c77-620890de89d3 req-2db405e1-e2d7-40f0-9fb8-c8d9200af5f0 service nova] Releasing lock "refresh_cache-6b2120d3-2e4b-4d1b-8109-6513b3b320eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.248827] env[68569]: DEBUG oslo_vmware.api [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167336, 'name': PowerOnVM_Task, 'duration_secs': 0.433316} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.249816] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.250098] env[68569]: DEBUG nova.compute.manager [None req-d0d088ce-2309-48cc-8b71-9b7bf4bada72 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.250966] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee5d979-b828-4bc1-8ec4-887a0d4774e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.372171] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] Releasing lock "refresh_cache-b91304c7-e74d-402b-865b-150e0057c895" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.372445] env[68569]: DEBUG nova.compute.manager [req-3c77a733-afc4-48ca-aab0-2ada9fa584bb req-f20878da-bd7d-4226-a802-047b1585a65e service nova] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Received event network-vif-deleted-9ff21d1d-5195-4832-bc11-4f0170d636b7 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 943.421717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "refresh_cache-e9994248-0240-412b-9e60-a04b00e5c0cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.421717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "refresh_cache-e9994248-0240-412b-9e60-a04b00e5c0cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.421896] env[68569]: DEBUG nova.network.neutron [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.459818] env[68569]: DEBUG nova.compute.utils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 943.464195] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 943.464369] env[68569]: DEBUG nova.network.neutron [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.482053] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524b7019-f216-75c2-5d9b-6e6f448ef79d, 'name': SearchDatastore_Task, 'duration_secs': 0.01896} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.483063] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e804b6d7-6156-4dc0-be39-fb497b168517 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.490239] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 943.490239] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5235bdaf-16d2-791d-dcbd-f0db6f187e4a" [ 943.490239] env[68569]: _type = "Task" [ 943.490239] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.499662] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5235bdaf-16d2-791d-dcbd-f0db6f187e4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.510764] env[68569]: DEBUG nova.policy [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afebab35cda9438781e2b466ce586405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 943.676573] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167337, 'name': Rename_Task, 'duration_secs': 0.142185} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.677744] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 943.677744] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e574b5c3-1497-4554-87ab-7f10a0b58fb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.684506] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 943.684506] env[68569]: value = "task-3167338" [ 943.684506] env[68569]: _type = "Task" [ 943.684506] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.693097] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.728715] env[68569]: INFO nova.compute.manager [-] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Took 1.43 seconds to deallocate network for instance. [ 943.781546] env[68569]: DEBUG nova.network.neutron [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Successfully created port: 2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.952793] env[68569]: DEBUG nova.network.neutron [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.968617] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 943.972336] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50d4aa38-dc82-46b7-ae9f-31550f436800 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.974066] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.740s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.974066] env[68569]: DEBUG nova.objects.instance [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'resources' on Instance uuid b91304c7-e74d-402b-865b-150e0057c895 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.002324] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5235bdaf-16d2-791d-dcbd-f0db6f187e4a, 'name': SearchDatastore_Task, 'duration_secs': 0.011582} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.002983] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.003308] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 6b2120d3-2e4b-4d1b-8109-6513b3b320eb/6b2120d3-2e4b-4d1b-8109-6513b3b320eb.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.003581] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d10147c-c957-4baf-828a-7a5c9efb4f5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.013901] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 944.013901] env[68569]: value = "task-3167339" [ 944.013901] env[68569]: _type = "Task" [ 944.013901] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.024898] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.143521] env[68569]: DEBUG nova.compute.manager [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Received event network-vif-deleted-948d6af6-c562-4b44-b852-89a32cf79dad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 944.144146] env[68569]: DEBUG nova.compute.manager [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Received event network-vif-plugged-40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 944.144453] env[68569]: DEBUG oslo_concurrency.lockutils [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] Acquiring lock "e9994248-0240-412b-9e60-a04b00e5c0cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.144854] env[68569]: DEBUG oslo_concurrency.lockutils [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.145181] env[68569]: DEBUG oslo_concurrency.lockutils [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.145471] env[68569]: DEBUG nova.compute.manager [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] No waiting events found dispatching network-vif-plugged-40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.145751] env[68569]: WARNING nova.compute.manager [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Received unexpected event network-vif-plugged-40a02443-14bb-4c27-9415-e49378e14566 for instance with vm_state building and task_state spawning. 
[ 944.146063] env[68569]: DEBUG nova.compute.manager [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Received event network-changed-40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 944.146752] env[68569]: DEBUG nova.compute.manager [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Refreshing instance network info cache due to event network-changed-40a02443-14bb-4c27-9415-e49378e14566. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 944.146931] env[68569]: DEBUG oslo_concurrency.lockutils [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] Acquiring lock "refresh_cache-e9994248-0240-412b-9e60-a04b00e5c0cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.147975] env[68569]: DEBUG nova.network.neutron [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Updating instance_info_cache with network_info: [{"id": "40a02443-14bb-4c27-9415-e49378e14566", "address": "fa:16:3e:4b:33:67", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a02443-14", "ovs_interfaceid": "40a02443-14bb-4c27-9415-e49378e14566", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.196173] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167338, 'name': PowerOnVM_Task} progress is 86%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.237044] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.479746] env[68569]: DEBUG nova.objects.instance [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'numa_topology' on Instance uuid b91304c7-e74d-402b-865b-150e0057c895 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.529971] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167339, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.586267] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "7696390d-a097-4b6d-827d-92f3165a4188" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.586597] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "7696390d-a097-4b6d-827d-92f3165a4188" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.587078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "7696390d-a097-4b6d-827d-92f3165a4188-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.587405] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "7696390d-a097-4b6d-827d-92f3165a4188-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.587679] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "7696390d-a097-4b6d-827d-92f3165a4188-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.590682] env[68569]: INFO 
nova.compute.manager [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Terminating instance [ 944.650522] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "refresh_cache-e9994248-0240-412b-9e60-a04b00e5c0cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.650979] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Instance network_info: |[{"id": "40a02443-14bb-4c27-9415-e49378e14566", "address": "fa:16:3e:4b:33:67", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a02443-14", "ovs_interfaceid": "40a02443-14bb-4c27-9415-e49378e14566", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 944.651890] env[68569]: DEBUG oslo_concurrency.lockutils [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] Acquired lock "refresh_cache-e9994248-0240-412b-9e60-a04b00e5c0cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.651890] env[68569]: DEBUG nova.network.neutron [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Refreshing network info cache for port 40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.653206] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:33:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40a02443-14bb-4c27-9415-e49378e14566', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.660777] env[68569]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 944.662107] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.662220] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a13620c3-3738-4339-b20a-849ff3af54c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.685388] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.685388] env[68569]: value = "task-3167340" [ 944.685388] env[68569]: _type = "Task" [ 944.685388] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.696921] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167340, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.699991] env[68569]: DEBUG oslo_vmware.api [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167338, 'name': PowerOnVM_Task, 'duration_secs': 0.808496} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.700257] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 944.700451] env[68569]: INFO nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Took 8.19 seconds to spawn the instance on the hypervisor. [ 944.700623] env[68569]: DEBUG nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 944.702028] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d264b9ed-dce3-41b5-a396-43545aef744a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.981193] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.983757] env[68569]: DEBUG nova.objects.base [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 945.011168] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.011422] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.011579] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.011759] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.011904] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.012061] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.012272] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.012427] env[68569]: 
DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.012590] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.012750] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.012969] env[68569]: DEBUG nova.virt.hardware [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.014203] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e09c2e-ea07-46da-9613-2f2836e80930 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.029351] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afcfb22-ee17-4ae9-9cb0-7b89f1460379 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.033427] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167339, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71761} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.035845] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 6b2120d3-2e4b-4d1b-8109-6513b3b320eb/6b2120d3-2e4b-4d1b-8109-6513b3b320eb.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 945.036077] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.036770] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6266a149-ae2d-48bb-9063-dd1a238e260f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.050280] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 945.050280] env[68569]: value = "task-3167341" [ 945.050280] env[68569]: _type = "Task" [ 945.050280] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.059542] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167341, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.095434] env[68569]: DEBUG nova.compute.manager [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.095693] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.099129] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd21645b-6153-4d98-84de-2059c12abf79 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.108203] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.108495] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8054fe4f-4554-4b38-9458-c57fbd2fde15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.116884] env[68569]: DEBUG oslo_vmware.api [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 945.116884] env[68569]: value = "task-3167342" [ 945.116884] env[68569]: _type = "Task" [ 945.116884] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.126979] env[68569]: DEBUG oslo_vmware.api [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.198232] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167340, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.221605] env[68569]: INFO nova.compute.manager [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Took 14.65 seconds to build instance. 
[ 945.318018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d336cbf7-f672-41a5-a78c-52b65ae67e74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.327614] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2856f5e2-badd-467c-919f-55fcf171d340 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.364535] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5356cec4-ac8a-4f25-bd04-48cc89b99c7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.373828] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddd14a1-b924-4149-8c8a-1532a1a9ba97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.378281] env[68569]: DEBUG nova.network.neutron [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Successfully updated port: 2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.395224] env[68569]: DEBUG nova.compute.provider_tree [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.449059] env[68569]: DEBUG nova.network.neutron [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Updated VIF entry in instance network info cache for port 40a02443-14bb-4c27-9415-e49378e14566. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.449059] env[68569]: DEBUG nova.network.neutron [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Updating instance_info_cache with network_info: [{"id": "40a02443-14bb-4c27-9415-e49378e14566", "address": "fa:16:3e:4b:33:67", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a02443-14", "ovs_interfaceid": "40a02443-14bb-4c27-9415-e49378e14566", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.561661] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167341, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072303} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.561661] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.562527] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320af79a-a8f0-493d-8523-993bb94a9356 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.587429] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 6b2120d3-2e4b-4d1b-8109-6513b3b320eb/6b2120d3-2e4b-4d1b-8109-6513b3b320eb.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.589492] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1015d619-8410-4116-aaa1-ef858bfea1fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.617621] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 945.617621] env[68569]: value = "task-3167343" [ 945.617621] env[68569]: _type = "Task" [ 945.617621] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.634953] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167343, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.635205] env[68569]: DEBUG oslo_vmware.api [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167342, 'name': PowerOffVM_Task, 'duration_secs': 0.423103} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.635260] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.635440] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.635722] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df555561-dec4-4781-b4de-c7763a460106 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.698952] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167340, 'name': CreateVM_Task, 'duration_secs': 0.972844} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.699319] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 945.700322] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.700558] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.700910] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 945.701219] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0744b024-777c-43f8-b416-bde4d9a6a961 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.708295] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 945.708295] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c51aff-e945-fd1a-064d-e196e5f7838b" [ 945.708295] env[68569]: _type = "Task" [ 945.708295] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.709622] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.709827] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.710032] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Deleting the datastore file [datastore1] 7696390d-a097-4b6d-827d-92f3165a4188 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.713255] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1b1bf02-bf9b-4de8-b3f4-b8982a4221a3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.721496] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c51aff-e945-fd1a-064d-e196e5f7838b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.723235] env[68569]: DEBUG oslo_concurrency.lockutils [None req-de5ed40e-ac5b-4134-b704-9f6758453923 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.167s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.723589] env[68569]: DEBUG oslo_vmware.api [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 945.723589] env[68569]: value = "task-3167345" [ 945.723589] env[68569]: _type = "Task" [ 945.723589] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.732336] env[68569]: DEBUG oslo_vmware.api [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167345, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.885394] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.885587] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.885587] env[68569]: DEBUG nova.network.neutron [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.900328] env[68569]: DEBUG nova.scheduler.client.report [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.951221] env[68569]: DEBUG oslo_concurrency.lockutils [req-74ffb3f8-9349-4464-bb34-047ee51740c2 req-368cfdda-2851-4498-b60a-7898ac0cf5a0 service nova] Releasing lock "refresh_cache-e9994248-0240-412b-9e60-a04b00e5c0cd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.110479] env[68569]: INFO nova.compute.manager [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Rebuilding instance [ 946.128889] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167343, 'name': ReconfigVM_Task, 'duration_secs': 0.335692} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.129277] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 6b2120d3-2e4b-4d1b-8109-6513b3b320eb/6b2120d3-2e4b-4d1b-8109-6513b3b320eb.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.133236] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0764948-9cdd-4625-aa8c-5385dad3368b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.141209] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 946.141209] env[68569]: value = "task-3167346" [ 946.141209] env[68569]: _type = "Task" [ 946.141209] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.152505] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167346, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.152954] env[68569]: DEBUG nova.compute.manager [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.153725] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09022e04-616e-4887-8a9e-1ace1fb95942 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.178067] env[68569]: DEBUG nova.compute.manager [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Received event network-vif-plugged-2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 946.178288] env[68569]: DEBUG oslo_concurrency.lockutils [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] Acquiring lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.178436] env[68569]: DEBUG oslo_concurrency.lockutils [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.178501] env[68569]: DEBUG oslo_concurrency.lockutils [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 
req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.178684] env[68569]: DEBUG nova.compute.manager [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] No waiting events found dispatching network-vif-plugged-2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 946.178858] env[68569]: WARNING nova.compute.manager [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Received unexpected event network-vif-plugged-2455c808-85b7-415e-9905-53066039a3f3 for instance with vm_state building and task_state spawning. [ 946.179033] env[68569]: DEBUG nova.compute.manager [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Received event network-changed-2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 946.179230] env[68569]: DEBUG nova.compute.manager [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Refreshing instance network info cache due to event network-changed-2455c808-85b7-415e-9905-53066039a3f3. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 946.179412] env[68569]: DEBUG oslo_concurrency.lockutils [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] Acquiring lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.220444] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c51aff-e945-fd1a-064d-e196e5f7838b, 'name': SearchDatastore_Task, 'duration_secs': 0.022263} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.220735] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.220976] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.221234] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.221870] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.221870] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.221870] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ee3f816-36fb-4d2f-ad0d-096292330722 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.232352] env[68569]: DEBUG oslo_vmware.api [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170292} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.233461] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.233663] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.233858] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.234046] env[68569]: INFO nova.compute.manager [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Took 1.14 seconds to destroy the instance on the hypervisor. [ 946.234281] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.234484] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.234639] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 946.235344] env[68569]: DEBUG nova.compute.manager [-] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.235444] env[68569]: DEBUG nova.network.neutron [-] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.238066] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-329e5299-018a-4a3c-ae3f-dc74986b60c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.242840] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 946.242840] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5204aa31-55d1-baac-6eae-c3fb07e01501" [ 946.242840] env[68569]: _type = "Task" [ 946.242840] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.254948] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5204aa31-55d1-baac-6eae-c3fb07e01501, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.405271] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.432s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.408977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.904s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.409191] env[68569]: DEBUG nova.objects.instance [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lazy-loading 'resources' on Instance uuid c0211ed8-5995-48f4-b339-99bd4c93254c {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.436880] env[68569]: DEBUG nova.network.neutron [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.653476] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167346, 'name': Rename_Task, 'duration_secs': 0.182818} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.654422] env[68569]: DEBUG nova.network.neutron [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Updating instance_info_cache with network_info: [{"id": "2455c808-85b7-415e-9905-53066039a3f3", "address": "fa:16:3e:45:d7:46", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2455c808-85", "ovs_interfaceid": "2455c808-85b7-415e-9905-53066039a3f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.655830] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.656280] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75fc8cb8-2848-45da-a610-bd7eb93f5255 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.670425] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 946.670425] env[68569]: value = "task-3167347" [ 946.670425] env[68569]: _type = "Task" [ 946.670425] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.684170] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167347, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.754953] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5204aa31-55d1-baac-6eae-c3fb07e01501, 'name': SearchDatastore_Task, 'duration_secs': 0.011702} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.755948] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d07b16d-d3c4-4953-be45-7906e086ed9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.762283] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 946.762283] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524fde8d-689a-3d33-c797-89e9734f8340" [ 946.762283] env[68569]: _type = "Task" [ 946.762283] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.771840] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524fde8d-689a-3d33-c797-89e9734f8340, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.887929] env[68569]: DEBUG nova.compute.manager [req-7b9d15fb-3f37-4385-9b1f-ba7da5087bae req-ca9dd661-05b2-4e83-856a-d269ee1cd0bd service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Received event network-vif-deleted-d99f9970-89b3-4cf5-ae87-3e05a8b03ffa {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 946.888666] env[68569]: INFO nova.compute.manager [req-7b9d15fb-3f37-4385-9b1f-ba7da5087bae req-ca9dd661-05b2-4e83-856a-d269ee1cd0bd service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Neutron deleted interface d99f9970-89b3-4cf5-ae87-3e05a8b03ffa; detaching it from the instance and deleting it from the info cache [ 946.888666] env[68569]: DEBUG nova.network.neutron [req-7b9d15fb-3f37-4385-9b1f-ba7da5087bae req-ca9dd661-05b2-4e83-856a-d269ee1cd0bd service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.920224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-21782647-ee60-4121-85e9-9ebf7273944b tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.708s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.921838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock 
"b91304c7-e74d-402b-865b-150e0057c895" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.434s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.922180] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "b91304c7-e74d-402b-865b-150e0057c895-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.922503] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.922794] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.925379] env[68569]: INFO nova.compute.manager [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Terminating instance [ 947.161342] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.161854] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Instance network_info: |[{"id": "2455c808-85b7-415e-9905-53066039a3f3", "address": "fa:16:3e:45:d7:46", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2455c808-85", 
"ovs_interfaceid": "2455c808-85b7-415e-9905-53066039a3f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.162662] env[68569]: DEBUG oslo_concurrency.lockutils [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] Acquired lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.162971] env[68569]: DEBUG nova.network.neutron [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Refreshing network info cache for port 2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.164587] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:d7:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2455c808-85b7-415e-9905-53066039a3f3', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.173362] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.181124] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.181489] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.183161] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdfca87c-9af4-4499-a0ac-31e889f30f85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.187314] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6ace656-f9af-437a-8b4f-a109624acf44 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.217494] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 947.217494] env[68569]: value = "task-3167348" [ 947.217494] env[68569]: _type = "Task" [ 947.217494] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.217836] env[68569]: DEBUG oslo_vmware.api [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167347, 'name': PowerOnVM_Task, 'duration_secs': 0.525582} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.218115] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.218115] env[68569]: value = "task-3167349" [ 947.218115] env[68569]: _type = "Task" [ 947.218115] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.222245] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.222389] env[68569]: INFO nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Took 8.25 seconds to spawn the instance on the hypervisor. [ 947.222576] env[68569]: DEBUG nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.226551] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98337baf-0f22-4b94-858c-2292c3885485 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.238686] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c368299a-11f0-4c02-8ee0-adac1fe42b51 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.249365] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167349, 'name': CreateVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.253948] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0121a5d1-d519-4fe6-88fe-2f6a2677e432 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.257301] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.294305] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f400ab51-c669-4ca9-b968-60895bc0636f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.301840] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524fde8d-689a-3d33-c797-89e9734f8340, 'name': SearchDatastore_Task, 'duration_secs': 0.014223} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.302458] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.302716] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e9994248-0240-412b-9e60-a04b00e5c0cd/e9994248-0240-412b-9e60-a04b00e5c0cd.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 947.303274] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3be1c3a6-a4a2-4a45-845b-268b77336687 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.309407] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa0ea56-8b41-4638-9e82-c26c9ef35913 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.317680] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 947.317680] env[68569]: value = "task-3167350" [ 947.317680] env[68569]: _type = "Task" [ 947.317680] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.327342] env[68569]: DEBUG nova.compute.provider_tree [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.332041] env[68569]: DEBUG nova.network.neutron [-] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.337410] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167350, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.392515] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7716764a-0fe0-469f-8806-fcd57288590d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.403037] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71bd331-2d76-40d4-9c55-de6a078a67e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.431575] env[68569]: DEBUG nova.compute.manager [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 947.431890] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.453394] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00221213-6817-4c32-8eb4-d947a56a4467 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.456745] env[68569]: DEBUG nova.compute.manager [req-7b9d15fb-3f37-4385-9b1f-ba7da5087bae req-ca9dd661-05b2-4e83-856a-d269ee1cd0bd service nova] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Detach interface failed, port_id=d99f9970-89b3-4cf5-ae87-3e05a8b03ffa, reason: Instance 7696390d-a097-4b6d-827d-92f3165a4188 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 947.462904] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "7b95aece-35db-4eab-b221-c5eccd749eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.463223] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7b95aece-35db-4eab-b221-c5eccd749eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.472073] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9819e9-1a0b-49d6-ad03-2311b62aeaeb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.501766] env[68569]: DEBUG nova.network.neutron [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Updated VIF entry in instance network info cache for port 2455c808-85b7-415e-9905-53066039a3f3. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.502171] env[68569]: DEBUG nova.network.neutron [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Updating instance_info_cache with network_info: [{"id": "2455c808-85b7-415e-9905-53066039a3f3", "address": "fa:16:3e:45:d7:46", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2455c808-85", "ovs_interfaceid": "2455c808-85b7-415e-9905-53066039a3f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.518197] env[68569]: WARNING nova.virt.vmwareapi.vmops [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
b91304c7-e74d-402b-865b-150e0057c895 could not be found. [ 947.518441] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 947.518743] env[68569]: INFO nova.compute.manager [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: b91304c7-e74d-402b-865b-150e0057c895] Took 0.09 seconds to destroy the instance on the hypervisor. [ 947.519122] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.520022] env[68569]: DEBUG oslo_concurrency.lockutils [req-aa172e99-494c-4846-9c51-c9b4edef7cc8 req-bc9a1ea1-ce70-489b-8ed1-8914149ec9cb service nova] Releasing lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.520662] env[68569]: DEBUG nova.compute.manager [-] [instance: b91304c7-e74d-402b-865b-150e0057c895] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 947.520764] env[68569]: DEBUG nova.network.neutron [-] [instance: b91304c7-e74d-402b-865b-150e0057c895] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 947.737670] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167348, 'name': PowerOffVM_Task, 'duration_secs': 0.273815} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.740297] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.741831] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.741831] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167349, 'name': CreateVM_Task, 'duration_secs': 0.439334} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.741831] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-477f2fa4-9eb5-43a4-a787-cd4fd5e7ec5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.743242] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.743883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.744060] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.744411] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 947.745841] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ece4627-004a-48f4-9b7a-beee7915113c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.751243] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 947.751243] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5259ece0-9d3e-12d6-3206-6c27613783b6" [ 947.751243] env[68569]: _type = "Task" [ 947.751243] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.753090] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 947.753090] env[68569]: value = "task-3167351" [ 947.753090] env[68569]: _type = "Task" [ 947.753090] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.782885] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5259ece0-9d3e-12d6-3206-6c27613783b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.783480] env[68569]: INFO nova.compute.manager [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Took 16.31 seconds to build instance. [ 947.784569] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 947.784832] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 947.784982] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633587', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'name': 'volume-e0560587-266e-42c8-ae9a-a3f92ec6742b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a591b671-ca84-47b5-9831-63478d55fd07', 'attached_at': '', 'detached_at': '', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'serial': 'e0560587-266e-42c8-ae9a-a3f92ec6742b'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 947.786060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77221ea5-19dc-4be1-af41-ff369e3823ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.815362] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd1af62-ed98-4626-8ee4-5ac23d20336f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.829929] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167350, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.832939] env[68569]: DEBUG nova.scheduler.client.report [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.837341] env[68569]: INFO nova.compute.manager [-] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Took 1.60 seconds to deallocate network for instance. [ 947.838402] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c884d981-ea40-42fb-a526-b84c8c0e3aae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.866985] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd44b646-5280-44f9-840e-9242840782e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.884923] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] The volume has not been displaced from its original location: [datastore2] volume-e0560587-266e-42c8-ae9a-a3f92ec6742b/volume-e0560587-266e-42c8-ae9a-a3f92ec6742b.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 947.893558] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Reconfiguring VM instance instance-00000042 to detach disk 2000 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 947.894170] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98d0881a-b7f9-4877-9aee-d1639305f485 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.913263] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 947.913263] env[68569]: value = "task-3167352" [ 947.913263] env[68569]: _type = "Task" [ 947.913263] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.921869] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167352, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.966128] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 948.266870] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5259ece0-9d3e-12d6-3206-6c27613783b6, 'name': SearchDatastore_Task, 'duration_secs': 0.06802} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.267242] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.267485] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.267759] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.267914] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.268108] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.268376] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ea90f98-d4e2-4f30-807e-ccc862988776 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.280724] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.280931] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.281731] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b927b2aa-ea92-4bca-947b-d6e6f91136c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.285950] env[68569]: DEBUG oslo_concurrency.lockutils [None req-acc187e0-f3c5-4854-8904-959e8af0d38e tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.832s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.287843] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 948.287843] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526079cb-9785-ceb9-19d1-f50a6f312ed0" [ 948.287843] env[68569]: _type = "Task" [ 948.287843] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.296185] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526079cb-9785-ceb9-19d1-f50a6f312ed0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.319264] env[68569]: DEBUG nova.network.neutron [-] [instance: b91304c7-e74d-402b-865b-150e0057c895] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.330683] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662682} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.331533] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e9994248-0240-412b-9e60-a04b00e5c0cd/e9994248-0240-412b-9e60-a04b00e5c0cd.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 948.331760] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.332137] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a70747ed-66e0-49f6-8efc-c3a0dcee6ed9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.340151] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 948.340151] env[68569]: value = "task-3167353" [ 948.340151] env[68569]: _type = "Task" [ 948.340151] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.344796] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.936s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.347139] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.388s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.347379] env[68569]: DEBUG nova.objects.instance [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lazy-loading 'resources' on Instance uuid 2cf8803a-8078-4832-a736-330d6bcde6ce {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.349470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.355166] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 
tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.370215] env[68569]: INFO nova.scheduler.client.report [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted allocations for instance c0211ed8-5995-48f4-b339-99bd4c93254c [ 948.426544] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167352, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.485111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.556967] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.557435] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.557631] env[68569]: DEBUG nova.compute.manager [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 948.558653] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423e1f1c-1651-441f-9e68-488ae39e6753 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.566336] env[68569]: DEBUG nova.compute.manager [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 948.566947] env[68569]: DEBUG nova.objects.instance [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lazy-loading 'flavor' on Instance uuid 6b2120d3-2e4b-4d1b-8109-6513b3b320eb {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.798654] env[68569]: DEBUG 
oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526079cb-9785-ceb9-19d1-f50a6f312ed0, 'name': SearchDatastore_Task, 'duration_secs': 0.03326} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.799433] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0803ac86-6cc1-440a-845f-aec033fb8147 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.804419] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 948.804419] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5298dfd3-1ad2-9c5c-7fba-e7a41f16592d" [ 948.804419] env[68569]: _type = "Task" [ 948.804419] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.811993] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5298dfd3-1ad2-9c5c-7fba-e7a41f16592d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.824544] env[68569]: INFO nova.compute.manager [-] [instance: b91304c7-e74d-402b-865b-150e0057c895] Took 1.30 seconds to deallocate network for instance. [ 948.852954] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088851} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.853924] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 948.854285] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84af2f8-4336-40d8-b70a-082a66b445c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.877992] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] e9994248-0240-412b-9e60-a04b00e5c0cd/e9994248-0240-412b-9e60-a04b00e5c0cd.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.883300] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6886c17-42d2-4702-b0b0-27c5a72ffedb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.898631] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff5c8bd3-794c-4a7d-aef6-01504aad5664 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "c0211ed8-5995-48f4-b339-99bd4c93254c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.215s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.907234] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 948.907234] env[68569]: value = "task-3167354" [ 948.907234] env[68569]: _type = "Task" [ 948.907234] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.923784] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167354, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.931461] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167352, 'name': ReconfigVM_Task, 'duration_secs': 0.532339} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.931745] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Reconfigured VM instance instance-00000042 to detach disk 2000 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 948.936812] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f7b31b7-2235-4c1c-add3-8292f30fd883 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.955459] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 948.955459] env[68569]: value = "task-3167355" [ 948.955459] env[68569]: _type = "Task" [ 948.955459] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.967231] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.172716] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd3bb0a-e6bd-4f20-b892-d66b764646e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.180560] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7f51f8-43a9-4b50-9d35-018414f6585f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.217253] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068476c1-7969-46ca-b4ab-526bf7d0ed13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.224931] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8a86d0-5f84-48fb-8061-7b92919616af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.240159] env[68569]: DEBUG nova.compute.provider_tree [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.317691] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5298dfd3-1ad2-9c5c-7fba-e7a41f16592d, 'name': SearchDatastore_Task, 'duration_secs': 0.052241} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.317988] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.318261] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] f8b56e81-f3ef-489b-a64c-be687cf99fd1/f8b56e81-f3ef-489b-a64c-be687cf99fd1.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.318526] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3468fac-d00c-4b20-9eb3-4633cc02187a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.325440] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 949.325440] env[68569]: value = "task-3167356" [ 949.325440] env[68569]: _type = "Task" [ 949.325440] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.338070] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.417606] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167354, 'name': ReconfigVM_Task, 'duration_secs': 0.441523} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.417930] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Reconfigured VM instance instance-0000004f to attach disk [datastore2] e9994248-0240-412b-9e60-a04b00e5c0cd/e9994248-0240-412b-9e60-a04b00e5c0cd.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.418601] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-751fb283-7761-4d2f-a5ea-422faea17151 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.425478] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 949.425478] env[68569]: value = "task-3167357" [ 949.425478] env[68569]: _type = "Task" [ 949.425478] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.434705] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167357, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.465525] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.575180] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.576043] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce229e21-ae43-4353-b210-1b33fa6afe49 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.582750] env[68569]: DEBUG oslo_vmware.api [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 949.582750] env[68569]: value = "task-3167358" [ 949.582750] env[68569]: _type = "Task" [ 949.582750] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.591734] env[68569]: DEBUG oslo_vmware.api [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167358, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.744331] env[68569]: DEBUG nova.scheduler.client.report [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.840896] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167356, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.853546] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac6ba209-f744-47f5-abc6-10fbfe4c4113 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "b91304c7-e74d-402b-865b-150e0057c895" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.932s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.936845] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167357, 'name': Rename_Task, 'duration_secs': 0.152339} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.937179] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.937438] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad43f16b-21f6-4e40-9ad5-af0db8e536ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.945459] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 949.945459] env[68569]: value = "task-3167359" [ 949.945459] env[68569]: _type = "Task" [ 949.945459] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.954957] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.965857] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167355, 'name': ReconfigVM_Task, 'duration_secs': 0.573624} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.966220] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633587', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'name': 'volume-e0560587-266e-42c8-ae9a-a3f92ec6742b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a591b671-ca84-47b5-9831-63478d55fd07', 'attached_at': '', 'detached_at': '', 'volume_id': 'e0560587-266e-42c8-ae9a-a3f92ec6742b', 'serial': 'e0560587-266e-42c8-ae9a-a3f92ec6742b'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 949.966517] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.967376] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2935ad19-0c94-4609-acb6-5e0e95ec4268 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.976641] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.976985] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecda97be-9261-42c1-8be7-8bf6e513420c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.050557] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.050762] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.051011] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 
tempest-ServerActionsV293TestJSON-1911180199-project-member] Deleting the datastore file [datastore2] a591b671-ca84-47b5-9831-63478d55fd07 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.051286] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4f4a072-d46d-4aa0-8a40-ca0c3d341d9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.059671] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for the task: (returnval){ [ 950.059671] env[68569]: value = "task-3167361" [ 950.059671] env[68569]: _type = "Task" [ 950.059671] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.068359] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167361, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.093985] env[68569]: DEBUG oslo_vmware.api [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167358, 'name': PowerOffVM_Task, 'duration_secs': 0.285948} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.094276] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.094471] env[68569]: DEBUG nova.compute.manager [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.095296] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a1b7f2-804e-4f63-b43a-d4bd47687aa8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.250444] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.252899] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.016s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.253221] env[68569]: DEBUG 
nova.objects.instance [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lazy-loading 'resources' on Instance uuid 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.284458] env[68569]: INFO nova.scheduler.client.report [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Deleted allocations for instance 2cf8803a-8078-4832-a736-330d6bcde6ce [ 950.338271] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167356, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722566} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.339342] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] f8b56e81-f3ef-489b-a64c-be687cf99fd1/f8b56e81-f3ef-489b-a64c-be687cf99fd1.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.339607] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.339892] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3629da62-4e31-4e34-8b8c-917ddd0ce1f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.348459] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 950.348459] env[68569]: value = "task-3167362" [ 950.348459] env[68569]: _type = "Task" [ 950.348459] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.359617] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167362, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.456826] env[68569]: DEBUG oslo_vmware.api [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167359, 'name': PowerOnVM_Task, 'duration_secs': 0.499082} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.457140] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.457337] env[68569]: INFO nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Took 8.95 seconds to spawn the instance on the hypervisor. [ 950.457516] env[68569]: DEBUG nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.458538] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6875a512-aafc-4e50-a0f8-9b16249ac780 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.571784] env[68569]: DEBUG oslo_vmware.api [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Task: {'id': task-3167361, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.572121] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.572327] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.572498] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.608508] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0ef4f9b-6958-4ca3-9837-186305f7d46b tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.634319] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: 
a591b671-ca84-47b5-9831-63478d55fd07] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 950.634881] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ded85398-c5c7-47b0-b198-c965d7d0a343 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.645739] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384d3ffc-6be6-444c-8732-0e3daf90671a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.684819] env[68569]: ERROR nova.compute.manager [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Failed to detach volume e0560587-266e-42c8-ae9a-a3f92ec6742b from /dev/sda: nova.exception.InstanceNotFound: Instance a591b671-ca84-47b5-9831-63478d55fd07 could not be found. [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] Traceback (most recent call last): [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self.driver.rebuild(**kwargs) [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] raise NotImplementedError() [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] NotImplementedError [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] During handling of the above exception, another exception occurred: [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] Traceback (most recent call last): [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 950.684819] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self.driver.detach_volume(context, old_connection_info, [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] return self._volumeops.detach_volume(connection_info, instance) [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._detach_volume_vmdk(connection_info, instance) [ 950.685295] 
env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] stable_ref.fetch_moref(session) [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] raise exception.InstanceNotFound(instance_id=self._uuid) [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] nova.exception.InstanceNotFound: Instance a591b671-ca84-47b5-9831-63478d55fd07 could not be found. [ 950.685295] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.798772] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346e8260-77da-439f-92c5-74b0ae5f288f tempest-ServerMetadataNegativeTestJSON-1340209830 tempest-ServerMetadataNegativeTestJSON-1340209830-project-member] Lock "2cf8803a-8078-4832-a736-330d6bcde6ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.714s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.854836] env[68569]: DEBUG nova.compute.utils [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Build of instance a591b671-ca84-47b5-9831-63478d55fd07 aborted: Failed to rebuild volume backed instance. {{(pid=68569) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 950.860473] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167362, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099703} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.861052] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.861908] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c063b54-fcbe-4094-bd8a-990e03b42ffd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.870037] env[68569]: ERROR nova.compute.manager [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance a591b671-ca84-47b5-9831-63478d55fd07 aborted: Failed to rebuild volume backed instance. [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] Traceback (most recent call last): [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self.driver.rebuild(**kwargs) [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] raise NotImplementedError() [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] NotImplementedError [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] During handling of the above exception, another exception occurred: [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] Traceback (most recent call last): [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 950.870037] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._detach_root_volume(context, instance, root_bdm) [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] with excutils.save_and_reraise_exception(): [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] 
self.force_reraise() [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] raise self.value [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self.driver.detach_volume(context, old_connection_info, [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] return self._volumeops.detach_volume(connection_info, instance) [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 950.870697] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._detach_volume_vmdk(connection_info, instance) [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] stable_ref.fetch_moref(session) [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] raise exception.InstanceNotFound(instance_id=self._uuid) [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] nova.exception.InstanceNotFound: Instance a591b671-ca84-47b5-9831-63478d55fd07 could not be found. 
[ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] During handling of the above exception, another exception occurred: [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] Traceback (most recent call last): [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] yield [ 950.871376] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._do_rebuild_instance_with_claim( [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._do_rebuild_instance( [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._rebuild_default_impl(**kwargs) [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] self._rebuild_volume_backed_instance( [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] raise exception.BuildAbortException( [ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] nova.exception.BuildAbortException: Build of instance a591b671-ca84-47b5-9831-63478d55fd07 aborted: Failed to rebuild volume backed instance. 
[ 950.871851] env[68569]: ERROR nova.compute.manager [instance: a591b671-ca84-47b5-9831-63478d55fd07] [ 950.894261] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] f8b56e81-f3ef-489b-a64c-be687cf99fd1/f8b56e81-f3ef-489b-a64c-be687cf99fd1.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.902584] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ca5e17f-c7a2-4bc5-9fb7-94e2d6192b5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.926697] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 950.926697] env[68569]: value = "task-3167363" [ 950.926697] env[68569]: _type = "Task" [ 950.926697] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.939802] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167363, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.978920] env[68569]: INFO nova.compute.manager [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Took 17.98 seconds to build instance. 
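Annotation: the traceback above ends in BuildAbortException because the rebuild path tries to detach the root volume of a VM whose managed object reference no longer resolves in vCenter (the VM was unregistered and its datastore files deleted moments earlier, see task-3167361). The snippet below is a minimal illustrative sketch of that failure pattern, not Nova source; the class names and the moref lookup are simplified assumptions, but the exception flow mirrors the _detach_root_volume -> detach_volume -> _detach_volume_vmdk -> get_vm_ref chain shown in the log.

# Minimal sketch of the failure path in the traceback above (not Nova source).
# The moref lookup and exception classes are simplified/hypothetical.

class InstanceNotFound(Exception):
    pass

class BuildAbortException(Exception):
    pass

def get_vm_ref(vm_uuid, morefs_by_uuid):
    # Stands in for vm_util.get_vm_ref(): once the VM has been unregistered,
    # the UUID can no longer be resolved to a managed object reference.
    try:
        return morefs_by_uuid[vm_uuid]
    except KeyError:
        raise InstanceNotFound(f"Instance {vm_uuid} could not be found.")

def rebuild_volume_backed_instance(vm_uuid, morefs_by_uuid):
    try:
        # detach_volume -> _detach_volume_vmdk -> get_vm_ref is where the
        # InstanceNotFound in the log originates.
        get_vm_ref(vm_uuid, morefs_by_uuid)
    except InstanceNotFound:
        # The compute manager logs "Failed to detach volume ... from /dev/sda"
        # and aborts the rebuild, which is what sets vm_state to ERROR above.
        raise BuildAbortException(
            f"Build of instance {vm_uuid} aborted: "
            "Failed to rebuild volume backed instance.")

if __name__ == "__main__":
    try:
        # The VM was already unregistered, so no moref exists for its UUID.
        rebuild_volume_backed_instance(
            "a591b671-ca84-47b5-9831-63478d55fd07", {})
    except BuildAbortException as exc:
        print(exc)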
[ 951.122977] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f5e807-b462-4b2c-a989-8e6ac72a6857 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.132105] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e38d7b-737e-4bef-85f5-64134b304441 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.164084] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e14e77-b169-4679-870d-2f1e66af8986 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.172513] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cedb297-3164-44df-a200-ea799ef14d6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.188380] env[68569]: DEBUG nova.compute.provider_tree [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.213015] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.213176] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.217062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.217062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.401902] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock 
"0dc5da15-4c10-4754-ac82-a130b933295d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.402190] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.439390] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.482743] env[68569]: DEBUG oslo_concurrency.lockutils [None req-79266659-c6cb-447e-a975-dc6c56f5010c tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.491s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.594072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "e9994248-0240-412b-9e60-a04b00e5c0cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.594072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.594072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "e9994248-0240-412b-9e60-a04b00e5c0cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.594072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.594387] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e 
tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.596469] env[68569]: INFO nova.compute.manager [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Terminating instance [ 951.691516] env[68569]: DEBUG nova.scheduler.client.report [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.716694] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 951.720223] env[68569]: DEBUG nova.compute.utils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 951.904757] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 951.938190] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167363, 'name': ReconfigVM_Task, 'duration_secs': 0.559492} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.939054] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Reconfigured VM instance instance-00000050 to attach disk [datastore2] f8b56e81-f3ef-489b-a64c-be687cf99fd1/f8b56e81-f3ef-489b-a64c-be687cf99fd1.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.939054] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-007b32fb-b66a-469e-9b98-d08f4e9d0db3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.947071] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 951.947071] env[68569]: value = "task-3167364" [ 951.947071] env[68569]: _type = "Task" [ 951.947071] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.957333] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167364, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.046507] env[68569]: DEBUG nova.compute.manager [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.047953] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ed24fa-cf2e-4276-892f-0739abf8432e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.100285] env[68569]: DEBUG nova.compute.manager [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 952.100952] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.102020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55e7a0b-65e1-4e32-9cd2-278a6ae699f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.114997] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.115764] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf2238f6-71f8-44cb-9c86-4fbbd9c3e79f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.123569] env[68569]: DEBUG oslo_vmware.api [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 952.123569] env[68569]: value = "task-3167365" [ 952.123569] env[68569]: _type = "Task" [ 952.123569] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.137052] env[68569]: DEBUG oslo_vmware.api [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167365, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.196742] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.199390] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.850s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.199836] env[68569]: DEBUG nova.objects.instance [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lazy-loading 'resources' on Instance uuid 7696390d-a097-4b6d-827d-92f3165a4188 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.225773] env[68569]: DEBUG oslo_concurrency.lockutils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.236698] env[68569]: INFO nova.scheduler.client.report [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted allocations for instance 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd [ 952.245189] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.430707] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.458918] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167364, 'name': Rename_Task, 'duration_secs': 0.172285} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.459629] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.460015] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0df5532-c79b-405e-abef-56f3c8a53704 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.469868] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 952.469868] env[68569]: value = "task-3167366" [ 952.469868] env[68569]: _type = "Task" [ 952.469868] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.479811] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167366, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.560237] env[68569]: INFO nova.compute.manager [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] instance snapshotting [ 952.560237] env[68569]: WARNING nova.compute.manager [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 952.562969] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d693611b-2d70-40f3-934f-a82eb17f7f07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.584285] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eade2840-3799-491d-8ff7-7f3c3ff92628 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.638444] env[68569]: DEBUG oslo_vmware.api [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167365, 'name': PowerOffVM_Task, 'duration_secs': 0.206993} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.638720] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.638888] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.639154] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4dbaa0f2-9fbc-4ca7-a221-e28f3be7d4af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.717169] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.717499] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.717718] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleting the datastore file [datastore2] e9994248-0240-412b-9e60-a04b00e5c0cd {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.718126] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99041d5f-9a7b-4047-85b0-1ec07730b663 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.726844] env[68569]: DEBUG oslo_vmware.api [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 952.726844] env[68569]: value = "task-3167368" [ 952.726844] env[68569]: _type = "Task" [ 952.726844] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.739450] env[68569]: DEBUG oslo_vmware.api [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167368, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.745995] env[68569]: DEBUG oslo_concurrency.lockutils [None req-05fee2b8-cb5b-4e1d-9cc9-30003274e381 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "367f4fe5-ffef-45f3-b00e-a5cf0418d5cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.089s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.887036] env[68569]: DEBUG oslo_concurrency.lockutils [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.983387] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167366, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.016801] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde424ca-f7f0-4e80-861d-b3b63dc24cb1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.032249] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bd199a-236a-4423-a54e-fdeaf20739a5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.068391] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf856595-fe7a-4552-b4e6-f0d95e0f58a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.076597] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2242ca-e142-4a98-9a3e-034107477c19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.091295] env[68569]: DEBUG nova.compute.provider_tree [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.098008] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 953.098908] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e9784d47-a73e-4ddb-96fc-70bdde26a746 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.106553] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c 
tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 953.106553] env[68569]: value = "task-3167369" [ 953.106553] env[68569]: _type = "Task" [ 953.106553] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.115188] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167369, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.132571] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "b770fbd1-579a-4e3e-a5c9-9f030695f057" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.132841] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.133063] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "b770fbd1-579a-4e3e-a5c9-9f030695f057-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.133249] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.133418] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.135571] env[68569]: INFO nova.compute.manager [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Terminating instance [ 953.238478] env[68569]: DEBUG oslo_vmware.api [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152317} completed 
successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.238770] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.238954] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.239144] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.239346] env[68569]: INFO nova.compute.manager [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 953.239613] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.239888] env[68569]: DEBUG nova.compute.manager [-] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 953.239990] env[68569]: DEBUG nova.network.neutron [-] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.326629] env[68569]: DEBUG oslo_concurrency.lockutils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.326629] env[68569]: DEBUG oslo_concurrency.lockutils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.326629] env[68569]: INFO nova.compute.manager [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Attaching volume 3c545470-bd6a-4b3f-859b-0dbc00c8fe24 to /dev/sdb [ 953.363239] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a065cadb-d609-425a-ab8f-fe6ede5566a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.372530] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad71ca93-1216-4329-8cda-f12acb7cc85b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.387974] env[68569]: DEBUG nova.virt.block_device [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updating existing volume attachment record: 6ba9b91b-35da-4833-91ea-183f91c72519 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 953.485286] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167366, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.502392] env[68569]: DEBUG nova.compute.manager [req-9cb9ddee-95d0-4a43-a215-2b9169635f4e req-d5002338-6509-4c3c-84f1-7c0fca712b2e service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Received event network-vif-deleted-40a02443-14bb-4c27-9415-e49378e14566 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 953.502617] env[68569]: INFO nova.compute.manager [req-9cb9ddee-95d0-4a43-a215-2b9169635f4e req-d5002338-6509-4c3c-84f1-7c0fca712b2e service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Neutron deleted interface 40a02443-14bb-4c27-9415-e49378e14566; detaching it from the instance and deleting it from the info cache [ 953.502837] env[68569]: DEBUG nova.network.neutron [req-9cb9ddee-95d0-4a43-a215-2b9169635f4e req-d5002338-6509-4c3c-84f1-7c0fca712b2e service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.527555] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "a591b671-ca84-47b5-9831-63478d55fd07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.527828] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "a591b671-ca84-47b5-9831-63478d55fd07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.529028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "a591b671-ca84-47b5-9831-63478d55fd07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.529028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "a591b671-ca84-47b5-9831-63478d55fd07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.529028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "a591b671-ca84-47b5-9831-63478d55fd07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.531154] env[68569]: INFO nova.compute.manager [None 
req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Terminating instance [ 953.594418] env[68569]: DEBUG nova.scheduler.client.report [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.617919] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167369, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.642164] env[68569]: DEBUG nova.compute.manager [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 953.642454] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.643384] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48916364-ea6d-44bb-a688-aadac5ca9d9e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.651775] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.652049] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d554bf8-3666-460b-9ba5-8ff3c3047830 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.659259] env[68569]: DEBUG oslo_vmware.api [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 953.659259] env[68569]: value = "task-3167371" [ 953.659259] env[68569]: _type = "Task" [ 953.659259] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.668384] env[68569]: DEBUG oslo_vmware.api [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.980337] env[68569]: DEBUG nova.network.neutron [-] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.994599] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167366, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.006532] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b78f9cec-de83-4f19-a61f-78bfa2d579ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.018477] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a79ee8-83e0-4672-bcdc-2be123bd5921 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.035542] env[68569]: DEBUG nova.compute.manager [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 954.036439] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b90d098-359e-4abc-8d28-b4988535d37f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.050775] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4398bb29-bada-48b5-ac75-cc527fd34dd4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.074447] env[68569]: DEBUG nova.compute.manager [req-9cb9ddee-95d0-4a43-a215-2b9169635f4e req-d5002338-6509-4c3c-84f1-7c0fca712b2e service nova] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Detach interface failed, port_id=40a02443-14bb-4c27-9415-e49378e14566, reason: Instance e9994248-0240-412b-9e60-a04b00e5c0cd could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 954.101315] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.103590] env[68569]: WARNING nova.virt.vmwareapi.driver [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance a591b671-ca84-47b5-9831-63478d55fd07 could not be found. [ 954.103808] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.104602] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.620s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.106116] env[68569]: INFO nova.compute.claims [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.108589] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1a52e9c-a26e-47b0-8b27-bb28acf86407 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.123759] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167369, 'name': CreateSnapshot_Task, 'duration_secs': 0.896599} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.125330] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 954.126383] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2ba219-cdb2-45d2-94ea-855162aa1016 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.131469] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d529514-3f3d-40fe-8edf-64f65a3a8ba5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.143235] env[68569]: INFO nova.scheduler.client.report [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Deleted allocations for instance 7696390d-a097-4b6d-827d-92f3165a4188 [ 954.174719] env[68569]: WARNING nova.virt.vmwareapi.vmops [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a591b671-ca84-47b5-9831-63478d55fd07 could not be found. [ 954.175013] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.175213] env[68569]: INFO nova.compute.manager [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Took 0.14 seconds to destroy the instance on the hypervisor. [ 954.175455] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.179234] env[68569]: DEBUG nova.compute.manager [-] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 954.179234] env[68569]: DEBUG nova.network.neutron [-] [instance: a591b671-ca84-47b5-9831-63478d55fd07] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.187510] env[68569]: DEBUG oslo_vmware.api [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167371, 'name': PowerOffVM_Task, 'duration_secs': 0.255454} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.188485] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.188660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.188906] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50e1eea2-3d58-4b98-81ab-9a4f7e32b1a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.262640] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.262844] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.263082] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleting the datastore file [datastore1] b770fbd1-579a-4e3e-a5c9-9f030695f057 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.263347] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65971b30-d5dc-487d-8136-ffc696be13e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.273164] env[68569]: DEBUG oslo_vmware.api [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 954.273164] env[68569]: value = "task-3167375" [ 954.273164] env[68569]: _type = "Task" [ 954.273164] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.283562] env[68569]: DEBUG oslo_vmware.api [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.486448] env[68569]: INFO nova.compute.manager [-] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Took 1.25 seconds to deallocate network for instance. 
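Annotation: the recurring oslo_concurrency.lockutils records in this trace ("Acquiring lock ... by ...", "Lock ... acquired ... waited N s", "released ... held N s") are emitted by the inner wrapper in lockutils.py each time a per-instance or per-resource code path is serialized. The sketch below is illustrative only, assuming the standard oslo.concurrency synchronized decorator; the lock name and the build_instance function are hypothetical stand-ins for Nova's instance-UUID locks and its _locked_do_build_and_run_instance path, not the actual Nova code.

    # Illustrative sketch (assumed pattern, not Nova source): serializing work
    # on one named lock with oslo.concurrency, which logs the same
    # "acquired ... waited" / "released ... held" DEBUG bookkeeping seen above.
    import logging

    from oslo_concurrency import lockutils

    # Emit DEBUG so the lockutils wait/held messages are visible.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('adc7f255-be88-4eda-be25-f9ecc9d9bf99')
    def build_instance():
        # Only one caller at a time may hold the named lock; the decorator's
        # wrapper records how long it waited for the lock and, on exit, how
        # long it was held.
        pass

    if __name__ == '__main__':
        build_instance()

Passing external=True to the decorator would switch to a file-based lock shared across processes; the in-process form shown here is enough to reproduce the waited/held accounting that appears throughout this log.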
[ 954.486825] env[68569]: DEBUG oslo_vmware.api [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167366, 'name': PowerOnVM_Task, 'duration_secs': 1.588586} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.492752] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 954.492752] env[68569]: INFO nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Took 9.51 seconds to spawn the instance on the hypervisor. [ 954.492911] env[68569]: DEBUG nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 954.493936] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c67298-12d9-45b2-8ca6-fa9a4bee41c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.665996] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 954.666788] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdcc7dcf-f838-423f-87aa-f25ae77ee0a3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "7696390d-a097-4b6d-827d-92f3165a4188" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.080s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.668153] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-55a026a5-1394-410c-8a28-e26847614c97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.680421] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 954.680421] env[68569]: value = "task-3167376" [ 954.680421] env[68569]: _type = "Task" [ 954.680421] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.689287] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167376, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.777022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "123a6895-af16-493a-afce-7ae6c2137422" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.777022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "123a6895-af16-493a-afce-7ae6c2137422" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.777022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "123a6895-af16-493a-afce-7ae6c2137422-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.777022] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "123a6895-af16-493a-afce-7ae6c2137422-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.777500] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "123a6895-af16-493a-afce-7ae6c2137422-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.782596] env[68569]: INFO nova.compute.manager [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Terminating instance [ 954.790375] env[68569]: DEBUG oslo_vmware.api [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157415} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.791024] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.791430] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.791796] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.792161] env[68569]: INFO nova.compute.manager [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Took 1.15 seconds to destroy the instance on the hypervisor. [ 954.792966] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.793204] env[68569]: DEBUG nova.compute.manager [-] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 954.793415] env[68569]: DEBUG nova.network.neutron [-] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.995125] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.015710] env[68569]: INFO nova.compute.manager [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Took 15.83 seconds to build instance. [ 955.194341] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167376, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.217033] env[68569]: DEBUG nova.compute.manager [req-7cebd315-679b-49d2-b900-6536be17c292 req-97ec0d72-5818-48f1-ad23-1e2589406d25 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Received event network-vif-deleted-66ecd67f-062e-46e1-8ee0-6a4806e9d0e8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 955.217475] env[68569]: INFO nova.compute.manager [req-7cebd315-679b-49d2-b900-6536be17c292 req-97ec0d72-5818-48f1-ad23-1e2589406d25 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Neutron deleted interface 66ecd67f-062e-46e1-8ee0-6a4806e9d0e8; detaching it from the instance and deleting it from the info cache [ 955.217797] env[68569]: DEBUG nova.network.neutron [req-7cebd315-679b-49d2-b900-6536be17c292 req-97ec0d72-5818-48f1-ad23-1e2589406d25 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.293689] env[68569]: DEBUG nova.compute.manager [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 955.293689] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.295185] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38fa27c-947d-4fea-9396-14a49bc5bd09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.311910] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.313165] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ce157e3-ba03-4b50-858f-02357ceb6c59 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.325475] env[68569]: DEBUG oslo_vmware.api [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 955.325475] env[68569]: value = "task-3167377" [ 955.325475] env[68569]: _type = "Task" [ 955.325475] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.338689] env[68569]: DEBUG oslo_vmware.api [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167377, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.454254] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364c70ff-b750-4154-839b-a91e0498ad52 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.470052] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1701b73-3ade-410d-865f-35b252f126aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.505413] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6726c2dc-7745-4940-844f-46ddd0b0a573 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.514700] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9c79f5-194b-4c05-b28f-372b89b56a86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.519581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b48bb618-1498-43a2-9157-09e4099eb1fe tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.340s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.532488] env[68569]: DEBUG nova.compute.provider_tree [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.543562] env[68569]: DEBUG nova.compute.manager [req-07c0835c-0b3a-48c2-8db4-27fad5d953e5 req-3e71f36c-604e-4460-8ce9-71b627b27d2b service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Received event network-vif-deleted-f270ffee-b36c-4ea6-bfca-2f839c671e61 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 955.543792] env[68569]: INFO nova.compute.manager [req-07c0835c-0b3a-48c2-8db4-27fad5d953e5 req-3e71f36c-604e-4460-8ce9-71b627b27d2b service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Neutron deleted interface f270ffee-b36c-4ea6-bfca-2f839c671e61; detaching it from the instance and deleting it from the info cache [ 955.543924] env[68569]: DEBUG nova.network.neutron [req-07c0835c-0b3a-48c2-8db4-27fad5d953e5 req-3e71f36c-604e-4460-8ce9-71b627b27d2b service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.662783] env[68569]: DEBUG nova.network.neutron [-] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.696793] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167376, 'name': CloneVM_Task} 
progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.720817] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-973acf94-4645-4de3-8f02-72810eb9fa41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.734690] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024d29f8-0084-4872-96e4-0fa859351a74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.777958] env[68569]: DEBUG nova.compute.manager [req-7cebd315-679b-49d2-b900-6536be17c292 req-97ec0d72-5818-48f1-ad23-1e2589406d25 service nova] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Detach interface failed, port_id=66ecd67f-062e-46e1-8ee0-6a4806e9d0e8, reason: Instance b770fbd1-579a-4e3e-a5c9-9f030695f057 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 955.813789] env[68569]: DEBUG nova.network.neutron [-] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.838161] env[68569]: DEBUG oslo_vmware.api [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167377, 'name': PowerOffVM_Task, 'duration_secs': 0.484814} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.838161] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 955.838428] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 955.839111] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0022acbc-763f-4ea7-aa9b-d1c65763387f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.037596] env[68569]: DEBUG nova.scheduler.client.report [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.047249] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-ae6fbe79-44d6-49fa-9310-00a600def843 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.061477] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1535ed6b-7ffe-4fe4-bbee-9da4396e1753 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.103774] env[68569]: DEBUG nova.compute.manager [req-07c0835c-0b3a-48c2-8db4-27fad5d953e5 req-3e71f36c-604e-4460-8ce9-71b627b27d2b service nova] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Detach interface failed, port_id=f270ffee-b36c-4ea6-bfca-2f839c671e61, reason: Instance a591b671-ca84-47b5-9831-63478d55fd07 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 956.166832] env[68569]: INFO nova.compute.manager [-] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Took 1.37 seconds to deallocate network for instance. [ 956.197788] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167376, 'name': CloneVM_Task} progress is 95%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.317734] env[68569]: INFO nova.compute.manager [-] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Took 2.14 seconds to deallocate network for instance. [ 956.547495] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.547495] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.547495] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Deleting the datastore file [datastore1] 123a6895-af16-493a-afce-7ae6c2137422 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.547495] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.547495] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 956.551389] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67255926-6951-4e16-b328-6714c137ac7f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.551958] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.307s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.553912] env[68569]: INFO nova.compute.claims [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.561988] env[68569]: DEBUG oslo_vmware.api [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for the task: (returnval){ [ 956.561988] env[68569]: value = "task-3167380" [ 956.561988] env[68569]: _type = "Task" [ 956.561988] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.571166] env[68569]: DEBUG oslo_vmware.api [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167380, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.675340] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.704567] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167376, 'name': CloneVM_Task, 'duration_secs': 1.700114} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.705973] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Created linked-clone VM from snapshot [ 956.707244] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f51267f-301b-4e74-9a1c-c097822a4c8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.723181] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Uploading image d761f48a-3fee-45c6-aed3-ecc0a2e681e2 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 956.752023] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 956.752023] env[68569]: value = "vm-633660" [ 956.752023] env[68569]: _type = "VirtualMachine" [ 956.752023] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 956.752230] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a0fd12ca-b34b-4f8c-aa3d-d461b06b6980 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.761435] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease: (returnval){ [ 956.761435] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113f9c-28bb-7549-d144-fc23c7f01d52" [ 956.761435] env[68569]: _type = "HttpNfcLease" [ 956.761435] env[68569]: } obtained for exporting VM: (result){ [ 956.761435] env[68569]: value = "vm-633660" [ 956.761435] env[68569]: _type = "VirtualMachine" [ 956.761435] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 956.762641] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the lease: (returnval){ [ 956.762641] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113f9c-28bb-7549-d144-fc23c7f01d52" [ 956.762641] env[68569]: _type = "HttpNfcLease" [ 956.762641] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 956.770939] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 956.770939] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113f9c-28bb-7549-d144-fc23c7f01d52" [ 956.770939] env[68569]: _type = "HttpNfcLease" [ 956.770939] env[68569]: } is initializing. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 956.885437] env[68569]: INFO nova.compute.manager [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Took 0.57 seconds to detach 1 volumes for instance. [ 956.887921] env[68569]: DEBUG nova.compute.manager [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Deleting volume: e0560587-266e-42c8-ae9a-a3f92ec6742b {{(pid=68569) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 957.059491] env[68569]: DEBUG nova.compute.utils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.063017] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 957.063017] env[68569]: DEBUG nova.network.neutron [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 957.079559] env[68569]: DEBUG oslo_vmware.api [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Task: {'id': task-3167380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221691} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.081227] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.081227] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.081369] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.082067] env[68569]: INFO nova.compute.manager [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Took 1.79 seconds to destroy the instance on the hypervisor. 
[ 957.082067] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.082236] env[68569]: DEBUG nova.compute.manager [-] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 957.082363] env[68569]: DEBUG nova.network.neutron [-] [instance: 123a6895-af16-493a-afce-7ae6c2137422] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 957.136882] env[68569]: DEBUG nova.policy [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 957.155708] env[68569]: DEBUG nova.compute.manager [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 957.274667] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 957.274667] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113f9c-28bb-7549-d144-fc23c7f01d52" [ 957.274667] env[68569]: _type = "HttpNfcLease" [ 957.274667] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 957.274906] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 957.274906] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52113f9c-28bb-7549-d144-fc23c7f01d52" [ 957.274906] env[68569]: _type = "HttpNfcLease" [ 957.274906] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 957.276028] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f613aa5-cc29-454d-aef1-fe762b514064 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.284926] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ccf535-474a-92c6-6762-d603e7915397/disk-0.vmdk from lease info. 
{{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 957.285878] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ccf535-474a-92c6-6762-d603e7915397/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 957.435036] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5db6caf-20c6-4c9b-b95b-d075d481376d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.450466] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.491869] env[68569]: DEBUG nova.compute.manager [req-af80be60-3206-4373-86b5-91385c13ea94 req-22e95cf0-2d6c-41c1-9907-e3b0bcb105fa service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Received event network-vif-deleted-4febb0a1-9ff1-4755-b668-4e2723df003b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 957.492120] env[68569]: INFO nova.compute.manager [req-af80be60-3206-4373-86b5-91385c13ea94 req-22e95cf0-2d6c-41c1-9907-e3b0bcb105fa service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Neutron deleted interface 4febb0a1-9ff1-4755-b668-4e2723df003b; detaching it from the instance and deleting it from the info cache [ 957.492345] env[68569]: DEBUG nova.network.neutron [req-af80be60-3206-4373-86b5-91385c13ea94 req-22e95cf0-2d6c-41c1-9907-e3b0bcb105fa service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.562438] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 957.662827] env[68569]: DEBUG nova.network.neutron [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Successfully created port: 89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.680238] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.896728] env[68569]: DEBUG nova.network.neutron [-] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.918152] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16aa191-8e25-444b-9fc1-450cd1452e93 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.928219] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae07d88-2d8c-432b-ab24-d42c21c39904 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.968286] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681194e9-f145-4cc2-9ed5-c3ddca3c834a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.979124] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753f70fe-f793-4d11-a8c0-ccda21a38e3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.997713] env[68569]: DEBUG nova.compute.provider_tree [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.999241] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b850ed4-52a1-4aeb-b322-8b4d742b5cb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.010849] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccfd093-2bda-4a89-bda4-d73ff1beafec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.055356] env[68569]: DEBUG nova.compute.manager [req-af80be60-3206-4373-86b5-91385c13ea94 req-22e95cf0-2d6c-41c1-9907-e3b0bcb105fa service nova] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Detach interface failed, port_id=4febb0a1-9ff1-4755-b668-4e2723df003b, reason: Instance 123a6895-af16-493a-afce-7ae6c2137422 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 958.402851] env[68569]: INFO nova.compute.manager [-] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Took 1.32 seconds to deallocate network for instance. [ 958.440955] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 958.442403] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633659', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'name': 'volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fd803a5e-8dbd-449e-b45d-1e6410a286e8', 'attached_at': '', 'detached_at': '', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'serial': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 958.446953] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93ae992-821e-44fe-9da4-2e3e0838d190 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.468702] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f831e1-aadf-4758-a895-15876e2c4cf9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.499978] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24/volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.500505] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b205bb1-f4ef-4b2c-8b39-a5693f9171ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.516739] env[68569]: DEBUG nova.scheduler.client.report [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.529538] env[68569]: DEBUG oslo_vmware.api [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 958.529538] env[68569]: value = "task-3167383" [ 958.529538] env[68569]: _type = "Task" [ 958.529538] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.547034] env[68569]: DEBUG oslo_vmware.api [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167383, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.573925] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 958.610379] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.611536] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.611761] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.611914] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.612075] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.613762] env[68569]: DEBUG 
nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.614058] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.614316] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.614513] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.614682] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.615336] env[68569]: DEBUG nova.virt.hardware [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.616337] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abc549e-3055-4012-9cb6-7867f337faa4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.627523] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bb71b2-ac0d-4742-b5e0-7dc85919ba95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.852884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.853226] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.913937] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.023091] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.023784] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.028496] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.598s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.030519] env[68569]: INFO nova.compute.claims [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.048181] env[68569]: DEBUG oslo_vmware.api [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167383, 'name': ReconfigVM_Task, 'duration_secs': 0.44172} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.048401] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Reconfigured VM instance instance-00000030 to attach disk [datastore1] volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24/volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.054628] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fd28246-5d40-4b74-9eb5-a750a956ccdd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.073920] env[68569]: DEBUG oslo_vmware.api [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 959.073920] env[68569]: value = "task-3167384" [ 959.073920] env[68569]: _type = "Task" [ 959.073920] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.086264] env[68569]: DEBUG oslo_vmware.api [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167384, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.288636] env[68569]: DEBUG nova.compute.manager [req-9d1d3b3b-89e5-42df-a051-882d2e4ddee2 req-239760ac-4550-42ef-b3a7-713c7d3b4f38 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Received event network-vif-plugged-89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 959.288722] env[68569]: DEBUG oslo_concurrency.lockutils [req-9d1d3b3b-89e5-42df-a051-882d2e4ddee2 req-239760ac-4550-42ef-b3a7-713c7d3b4f38 service nova] Acquiring lock "7b95aece-35db-4eab-b221-c5eccd749eae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.289895] env[68569]: DEBUG oslo_concurrency.lockutils [req-9d1d3b3b-89e5-42df-a051-882d2e4ddee2 req-239760ac-4550-42ef-b3a7-713c7d3b4f38 service nova] Lock "7b95aece-35db-4eab-b221-c5eccd749eae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.289895] env[68569]: DEBUG oslo_concurrency.lockutils [req-9d1d3b3b-89e5-42df-a051-882d2e4ddee2 req-239760ac-4550-42ef-b3a7-713c7d3b4f38 service nova] Lock "7b95aece-35db-4eab-b221-c5eccd749eae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.289895] env[68569]: DEBUG nova.compute.manager [req-9d1d3b3b-89e5-42df-a051-882d2e4ddee2 req-239760ac-4550-42ef-b3a7-713c7d3b4f38 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] No waiting events found dispatching network-vif-plugged-89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 959.289895] env[68569]: WARNING nova.compute.manager [req-9d1d3b3b-89e5-42df-a051-882d2e4ddee2 req-239760ac-4550-42ef-b3a7-713c7d3b4f38 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Received unexpected event network-vif-plugged-89cde5fd-0a91-4be2-8dba-e551f8bcb57e for instance with vm_state building and task_state spawning. [ 959.352364] env[68569]: DEBUG nova.network.neutron [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Successfully updated port: 89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.357218] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 959.539592] env[68569]: DEBUG nova.compute.utils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 959.541353] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 959.541544] env[68569]: DEBUG nova.network.neutron [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 959.585920] env[68569]: DEBUG oslo_vmware.api [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167384, 'name': ReconfigVM_Task, 'duration_secs': 0.191395} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.586249] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633659', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'name': 'volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fd803a5e-8dbd-449e-b45d-1e6410a286e8', 'attached_at': '', 'detached_at': '', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'serial': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 959.593184] env[68569]: DEBUG nova.policy [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 959.847138] env[68569]: DEBUG nova.network.neutron [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Successfully created port: 9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.854897] 
env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-7b95aece-35db-4eab-b221-c5eccd749eae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.855045] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-7b95aece-35db-4eab-b221-c5eccd749eae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.855191] env[68569]: DEBUG nova.network.neutron [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.880792] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.044779] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 960.329586] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59088c23-9a14-4817-8461-f3697a941b6a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.339134] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a99cf5-4625-4a4f-b712-83eac198511d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.373838] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eae746-fd23-4a34-b736-73e1cfdebd31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.382931] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c541f98-5a33-4ec7-962f-3de30d957bd1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.401310] env[68569]: DEBUG nova.compute.provider_tree [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.437457] env[68569]: DEBUG nova.network.neutron [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 960.633232] env[68569]: DEBUG nova.objects.instance [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'flavor' on Instance uuid fd803a5e-8dbd-449e-b45d-1e6410a286e8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.735796] env[68569]: DEBUG nova.network.neutron [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Updating instance_info_cache with network_info: [{"id": "89cde5fd-0a91-4be2-8dba-e551f8bcb57e", "address": "fa:16:3e:c6:38:f1", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89cde5fd-0a", "ovs_interfaceid": "89cde5fd-0a91-4be2-8dba-e551f8bcb57e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.908137] env[68569]: DEBUG nova.scheduler.client.report [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.062028] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 961.089035] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.089280] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.089438] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.089615] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.089758] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.089927] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.090434] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.090717] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.090917] env[68569]: DEBUG nova.virt.hardware [None 
req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.091164] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.091282] env[68569]: DEBUG nova.virt.hardware [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.092603] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5099b63-b31b-45df-89d6-c969f6ebb994 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.101141] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72ef993-eb0c-43ff-ab71-d3cd3569568e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.141800] env[68569]: DEBUG oslo_concurrency.lockutils [None req-82055ebb-7767-497a-b7c4-b63a17445840 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.815s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.239163] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-7b95aece-35db-4eab-b221-c5eccd749eae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.239521] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Instance network_info: |[{"id": "89cde5fd-0a91-4be2-8dba-e551f8bcb57e", "address": "fa:16:3e:c6:38:f1", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89cde5fd-0a", 
"ovs_interfaceid": "89cde5fd-0a91-4be2-8dba-e551f8bcb57e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 961.239990] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:38:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89cde5fd-0a91-4be2-8dba-e551f8bcb57e', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 961.247571] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.247818] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 961.248066] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48409ff1-3dac-4f3a-9b21-9699589f2537 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.270482] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 961.270482] env[68569]: value = "task-3167385" [ 961.270482] env[68569]: _type = "Task" [ 961.270482] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.279848] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167385, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.318570] env[68569]: DEBUG nova.compute.manager [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Received event network-changed-89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 961.318750] env[68569]: DEBUG nova.compute.manager [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Refreshing instance network info cache due to event network-changed-89cde5fd-0a91-4be2-8dba-e551f8bcb57e. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 961.318965] env[68569]: DEBUG oslo_concurrency.lockutils [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] Acquiring lock "refresh_cache-7b95aece-35db-4eab-b221-c5eccd749eae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.319240] env[68569]: DEBUG oslo_concurrency.lockutils [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] Acquired lock "refresh_cache-7b95aece-35db-4eab-b221-c5eccd749eae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.319413] env[68569]: DEBUG nova.network.neutron [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Refreshing network info cache for port 89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.357624] env[68569]: DEBUG nova.network.neutron [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Successfully updated port: 9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.412820] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.413552] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 961.417182] env[68569]: DEBUG oslo_concurrency.lockutils [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.530s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.744283] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c04ff7-db1b-4ea7-9f18-70eb8dccf1c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.753249] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0dc983-7a63-4043-a477-e735a9a1e135 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.792473] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a37d3b7-830c-4a84-aeaf-b85d64049821 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.804485] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37acd00-3743-48c3-a743-2b746ff833d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.809250] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167385, 'name': CreateVM_Task, 'duration_secs': 0.45068} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.809250] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.809774] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.809949] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.810282] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.810537] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dcae3e3-8f14-4a8a-81b9-ff6f7284e211 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.821323] env[68569]: 
DEBUG nova.compute.provider_tree [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.827303] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 961.827303] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b38822-2a9d-04bd-c693-dba8c26e67da" [ 961.827303] env[68569]: _type = "Task" [ 961.827303] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.837090] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b38822-2a9d-04bd-c693-dba8c26e67da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.861115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-adc7f255-be88-4eda-be25-f9ecc9d9bf99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.861277] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-adc7f255-be88-4eda-be25-f9ecc9d9bf99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.861467] env[68569]: DEBUG nova.network.neutron [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.923164] env[68569]: DEBUG nova.compute.utils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 961.924533] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 961.925042] env[68569]: DEBUG nova.network.neutron [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.964509] env[68569]: DEBUG nova.policy [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54ebbdfe9bfb4854a40b07d60c7a9efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f361997374e943cfa7a8e4e4884d6c65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 962.056640] env[68569]: DEBUG nova.network.neutron [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Updated VIF entry in instance network info cache for port 89cde5fd-0a91-4be2-8dba-e551f8bcb57e. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 962.057011] env[68569]: DEBUG nova.network.neutron [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Updating instance_info_cache with network_info: [{"id": "89cde5fd-0a91-4be2-8dba-e551f8bcb57e", "address": "fa:16:3e:c6:38:f1", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89cde5fd-0a", "ovs_interfaceid": "89cde5fd-0a91-4be2-8dba-e551f8bcb57e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.171353] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
962.171641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.286136] env[68569]: DEBUG nova.network.neutron [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Successfully created port: 6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 962.326945] env[68569]: DEBUG nova.scheduler.client.report [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.341964] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b38822-2a9d-04bd-c693-dba8c26e67da, 'name': SearchDatastore_Task, 'duration_secs': 0.015744} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.342515] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.342851] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.343166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.343579] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.344759] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.344759] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2834440f-28c3-47bb-acf8-5c32c57a720e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.354629] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.354866] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.355839] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-008ac938-9841-446a-839e-ec3920fa4e29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.362563] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 962.362563] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fa1137-6660-19e1-e28b-956e4d075646" [ 962.362563] env[68569]: _type = "Task" [ 962.362563] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.373776] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fa1137-6660-19e1-e28b-956e4d075646, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.403780] env[68569]: DEBUG nova.network.neutron [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.428051] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 962.560450] env[68569]: DEBUG oslo_concurrency.lockutils [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] Releasing lock "refresh_cache-7b95aece-35db-4eab-b221-c5eccd749eae" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.560735] env[68569]: DEBUG nova.compute.manager [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Received event network-vif-plugged-9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 962.560951] env[68569]: DEBUG oslo_concurrency.lockutils [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] Acquiring lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.561178] env[68569]: DEBUG oslo_concurrency.lockutils [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.561343] env[68569]: DEBUG oslo_concurrency.lockutils [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.561551] env[68569]: DEBUG nova.compute.manager [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] No waiting events found dispatching network-vif-plugged-9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.561726] env[68569]: WARNING nova.compute.manager [req-6c1c326b-e0c6-483c-8cd7-63385e6d3df3 req-f60b6ebb-0697-4ec2-b2aa-5ac325923961 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Received unexpected event network-vif-plugged-9490c5cc-846a-4eee-aa25-a8544a7619d0 for instance with vm_state building and task_state spawning. 
[ 962.565038] env[68569]: DEBUG nova.network.neutron [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Updating instance_info_cache with network_info: [{"id": "9490c5cc-846a-4eee-aa25-a8544a7619d0", "address": "fa:16:3e:0a:06:84", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9490c5cc-84", "ovs_interfaceid": "9490c5cc-846a-4eee-aa25-a8544a7619d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.566384] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.567312] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.567649] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.567902] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.568150] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock 
"060fc4c8-b173-4fc4-8232-e13e3eac9dc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.570277] env[68569]: INFO nova.compute.manager [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Terminating instance [ 962.675602] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 962.835966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.419s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.836309] env[68569]: INFO nova.compute.manager [None req-841ac650-d03a-4c58-82bc-28d9bbf3fa46 tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Successfully reverted task state from rebuilding on failure for instance. [ 962.841410] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.847s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.841632] env[68569]: DEBUG nova.objects.instance [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lazy-loading 'resources' on Instance uuid e9994248-0240-412b-9e60-a04b00e5c0cd {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.873614] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fa1137-6660-19e1-e28b-956e4d075646, 'name': SearchDatastore_Task, 'duration_secs': 0.011183} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.874434] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca176cd7-d8fc-44ac-b03c-9bcc6d0a0c2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.880162] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 962.880162] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7d17-f861-81ee-a45e-a54aa0735f16" [ 962.880162] env[68569]: _type = "Task" [ 962.880162] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.888848] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7d17-f861-81ee-a45e-a54aa0735f16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.071754] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-adc7f255-be88-4eda-be25-f9ecc9d9bf99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.071896] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Instance network_info: |[{"id": "9490c5cc-846a-4eee-aa25-a8544a7619d0", "address": "fa:16:3e:0a:06:84", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9490c5cc-84", "ovs_interfaceid": "9490c5cc-846a-4eee-aa25-a8544a7619d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 963.072229] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:0a:06:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9490c5cc-846a-4eee-aa25-a8544a7619d0', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.079890] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 963.080445] env[68569]: DEBUG nova.compute.manager [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 963.080642] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.080884] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.082150] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8b0862-ffdd-43fd-84dd-9aa09ef61d2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.084893] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f47a445-4b17-44c8-b773-ac867c59d245 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.105044] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.106414] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-563ef504-0800-4311-8477-0e589ab10e8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.108079] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.108079] env[68569]: value = "task-3167386" [ 963.108079] env[68569]: _type = "Task" [ 963.108079] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.112978] env[68569]: DEBUG oslo_vmware.api [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 963.112978] env[68569]: value = "task-3167387" [ 963.112978] env[68569]: _type = "Task" [ 963.112978] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.118995] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167386, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.125679] env[68569]: DEBUG oslo_vmware.api [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.203188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.349599] env[68569]: DEBUG nova.compute.manager [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Received event network-changed-9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 963.349599] env[68569]: DEBUG nova.compute.manager [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Refreshing instance network info cache due to event network-changed-9490c5cc-846a-4eee-aa25-a8544a7619d0. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 963.349599] env[68569]: DEBUG oslo_concurrency.lockutils [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] Acquiring lock "refresh_cache-adc7f255-be88-4eda-be25-f9ecc9d9bf99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.349599] env[68569]: DEBUG oslo_concurrency.lockutils [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] Acquired lock "refresh_cache-adc7f255-be88-4eda-be25-f9ecc9d9bf99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.349599] env[68569]: DEBUG nova.network.neutron [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Refreshing network info cache for port 9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.394076] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7d17-f861-81ee-a45e-a54aa0735f16, 'name': SearchDatastore_Task, 'duration_secs': 0.011008} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.394408] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.394706] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7b95aece-35db-4eab-b221-c5eccd749eae/7b95aece-35db-4eab-b221-c5eccd749eae.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.395107] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24c3df6a-e1f3-4b77-8fa7-f1ab4636c787 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.406692] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 963.406692] env[68569]: value = "task-3167388" [ 963.406692] env[68569]: _type = "Task" [ 963.406692] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.418286] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167388, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.437662] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 963.468298] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 963.468298] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.468463] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 963.468636] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.468791] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 963.469023] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 963.469156] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 963.469329] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 963.469508] env[68569]: DEBUG nova.virt.hardware [None 
req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 963.469720] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 963.469866] env[68569]: DEBUG nova.virt.hardware [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 963.470897] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744a91ce-d599-486c-95fc-eddbf06c2a7d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.484047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171aac73-439c-4b53-9a08-0719d85e9552 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.639111] env[68569]: DEBUG oslo_vmware.api [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167387, 'name': PowerOffVM_Task, 'duration_secs': 0.401856} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.639334] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167386, 'name': CreateVM_Task, 'duration_secs': 0.421328} completed successfully. 
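The nova.virt.hardware entries above trace topology selection for the 1-vCPU m1.nano flavor: with no flavor or image constraints the limits default to sockets=65536, cores=65536, threads=65536, preferences are 0:0:0, and the only possible topology is 1 socket x 1 core x 1 thread. A minimal sketch of that enumeration step, written as standalone Python for illustration (not Nova's actual _get_possible_cpu_topologies):

    # Enumerate (sockets, cores, threads) factorizations of a vCPU count that
    # respect per-dimension maxima like the 65536 limits reported in the log.
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class Topology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for threads in range(1, min(vcpus, max_threads) + 1):
            if vcpus % threads:
                continue
            per_thread_total = vcpus // threads
            for cores in range(1, min(per_thread_total, max_cores) + 1):
                if per_thread_total % cores:
                    continue
                sockets = per_thread_total // cores
                if sockets <= max_sockets:
                    found.append(Topology(sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)], matching the log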
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.639782] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.639782] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.640021] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.640202] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d350450b-2653-4a62-a513-ff3826fdd1aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.642418] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.642586] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.642901] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 963.643197] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-817f99ed-33f9-4d2d-a98e-7b6959dd8e98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.651971] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 963.651971] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a44a9d-210a-5cc6-4eff-9e4577ec11c5" [ 963.651971] env[68569]: _type = "Task" [ 963.651971] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.662296] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a44a9d-210a-5cc6-4eff-9e4577ec11c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.695033] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb36671-3869-4db5-9500-c3e3ed256049 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.704869] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba1cad3-4304-48f5-b066-bd3bd20a8886 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.753384] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15563d39-849c-4f2b-9c48-0a0d3a57f5d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.759342] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.759342] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.759342] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Deleting the datastore file [datastore1] 060fc4c8-b173-4fc4-8232-e13e3eac9dc3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.759342] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-132a2355-7cea-492b-889b-599f53dd339f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.765271] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e30b73c-8908-4cb0-b2b4-076d96ed3795 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.773074] env[68569]: DEBUG oslo_vmware.api [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for the task: (returnval){ [ 963.773074] env[68569]: value = "task-3167390" [ 963.773074] env[68569]: _type = "Task" [ 963.773074] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.785631] env[68569]: DEBUG nova.compute.provider_tree [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.793599] env[68569]: DEBUG oslo_vmware.api [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167390, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.828398] env[68569]: DEBUG nova.compute.manager [req-753452b5-9c8a-4540-9cfc-812d71ace6c5 req-420ff647-0ce2-42de-9d65-a66d3a9104be service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Received event network-vif-plugged-6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 963.828613] env[68569]: DEBUG oslo_concurrency.lockutils [req-753452b5-9c8a-4540-9cfc-812d71ace6c5 req-420ff647-0ce2-42de-9d65-a66d3a9104be service nova] Acquiring lock "0dc5da15-4c10-4754-ac82-a130b933295d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.828828] env[68569]: DEBUG oslo_concurrency.lockutils [req-753452b5-9c8a-4540-9cfc-812d71ace6c5 req-420ff647-0ce2-42de-9d65-a66d3a9104be service nova] Lock "0dc5da15-4c10-4754-ac82-a130b933295d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.829023] env[68569]: DEBUG oslo_concurrency.lockutils [req-753452b5-9c8a-4540-9cfc-812d71ace6c5 req-420ff647-0ce2-42de-9d65-a66d3a9104be service nova] Lock "0dc5da15-4c10-4754-ac82-a130b933295d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.829174] env[68569]: DEBUG nova.compute.manager [req-753452b5-9c8a-4540-9cfc-812d71ace6c5 req-420ff647-0ce2-42de-9d65-a66d3a9104be service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] No waiting events found dispatching network-vif-plugged-6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 963.829352] env[68569]: WARNING nova.compute.manager [req-753452b5-9c8a-4540-9cfc-812d71ace6c5 req-420ff647-0ce2-42de-9d65-a66d3a9104be service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Received unexpected event network-vif-plugged-6c2568f4-2500-47df-982b-bf146c8d29d5 for instance with vm_state building and task_state spawning. [ 963.920596] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167388, 'name': CopyVirtualDisk_Task} progress is 77%. 
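The oslo_vmware.api entries above and below follow one pattern: a vCenter request is submitted (CreateVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, SearchDatastore_Task), the caller waits for the returned task object, progress is polled ("progress is 0%", "progress is 77%"), and the task eventually reports "completed successfully" with a duration. A minimal sketch of that poll-until-done loop; this is an illustrative stand-in, not oslo.vmware's wait_for_task implementation, and poll_task is a hypothetical callable returning (state, progress):

    import time

    def wait_for_task(poll_task, interval=0.5, timeout=300.0):
        # Poll a long-running task until it finishes, fails, or times out.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_task()
            if state == "success":
                return progress
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)   # still queued or running; poll again
        raise TimeoutError("task did not complete in time")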
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.966975] env[68569]: DEBUG nova.network.neutron [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Successfully updated port: 6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.150294] env[68569]: DEBUG nova.network.neutron [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Updated VIF entry in instance network info cache for port 9490c5cc-846a-4eee-aa25-a8544a7619d0. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.150678] env[68569]: DEBUG nova.network.neutron [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Updating instance_info_cache with network_info: [{"id": "9490c5cc-846a-4eee-aa25-a8544a7619d0", "address": "fa:16:3e:0a:06:84", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9490c5cc-84", "ovs_interfaceid": "9490c5cc-846a-4eee-aa25-a8544a7619d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.162640] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a44a9d-210a-5cc6-4eff-9e4577ec11c5, 'name': SearchDatastore_Task, 'duration_secs': 0.064878} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.163559] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.163783] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 964.164049] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.164173] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.164360] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.164855] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeb1a038-753e-4ce0-9cb0-42abd72877bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.174834] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.175032] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Folder [datastore1] devstack-image-cache_base created. 
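Around the image-cache work above, the log serializes access to the datastore cache path with a named lock ("Acquiring lock", "Acquired lock", "Releasing lock" on [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629). A minimal sketch of that guard pattern using oslo.concurrency's lock context manager; the lock name is copied from the log, and fetch is a hypothetical callable standing in for the cache check and population:

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(fetch):
        lock_name = "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629"
        with lockutils.lock(lock_name):
            # Only one worker at a time inspects or populates the cached image.
            return fetch()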
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 964.175738] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3277cf3-f5c8-45ae-8050-ec4fc91b9938 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.181814] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 964.181814] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520bb271-11b7-db04-09a9-d07643bd223d" [ 964.181814] env[68569]: _type = "Task" [ 964.181814] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.190144] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520bb271-11b7-db04-09a9-d07643bd223d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.283488] env[68569]: DEBUG oslo_vmware.api [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Task: {'id': task-3167390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299415} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.283809] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.284047] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.284265] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.284473] env[68569]: INFO nova.compute.manager [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Took 1.20 seconds to destroy the instance on the hypervisor. [ 964.284757] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.284997] env[68569]: DEBUG nova.compute.manager [-] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 964.285111] env[68569]: DEBUG nova.network.neutron [-] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 964.288765] env[68569]: DEBUG nova.scheduler.client.report [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.419341] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167388, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.468433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "refresh_cache-0dc5da15-4c10-4754-ac82-a130b933295d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.468654] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "refresh_cache-0dc5da15-4c10-4754-ac82-a130b933295d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.468871] env[68569]: DEBUG nova.network.neutron [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.653732] env[68569]: DEBUG oslo_concurrency.lockutils [req-e36e2558-9fc7-4e4d-b736-d74bfe4c3326 req-ec7175b7-8e82-4b16-be6a-ad601b1febf4 service nova] Releasing lock "refresh_cache-adc7f255-be88-4eda-be25-f9ecc9d9bf99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.697918] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520bb271-11b7-db04-09a9-d07643bd223d, 'name': SearchDatastore_Task, 'duration_secs': 0.010299} completed successfully. 
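The inventory data the report client logs above implies the effective schedulable capacity per resource class, conventionally (total - reserved) * allocation_ratio: 192 VCPU, 196078 MB of RAM, and 400 GB of disk for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. A small worked example with the figures taken verbatim from the log; the formula is the usual placement capacity convention, not a call into placement itself:

    # Effective capacity per resource class from the logged inventory.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0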
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.699090] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91dafcc1-e98c-4554-bcf9-fcc59319f98e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.705484] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 964.705484] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527983c9-4c3e-c732-9692-65bf82ee2173" [ 964.705484] env[68569]: _type = "Task" [ 964.705484] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.715050] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527983c9-4c3e-c732-9692-65bf82ee2173, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.793741] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.796333] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.121s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.796568] env[68569]: DEBUG nova.objects.instance [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lazy-loading 'resources' on Instance uuid b770fbd1-579a-4e3e-a5c9-9f030695f057 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.817388] env[68569]: INFO nova.scheduler.client.report [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted allocations for instance e9994248-0240-412b-9e60-a04b00e5c0cd [ 964.921440] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167388, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.014804] env[68569]: DEBUG nova.network.neutron [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.169776] env[68569]: DEBUG nova.network.neutron [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Updating instance_info_cache with network_info: [{"id": "6c2568f4-2500-47df-982b-bf146c8d29d5", "address": "fa:16:3e:94:de:48", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2568f4-25", "ovs_interfaceid": "6c2568f4-2500-47df-982b-bf146c8d29d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.219747] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527983c9-4c3e-c732-9692-65bf82ee2173, 'name': SearchDatastore_Task, 'duration_secs': 0.012317} completed successfully. 
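The instance_info_cache updates above print the full network_info structure for port 6c2568f4-2500-47df-982b-bf146c8d29d5 (MAC fa:16:3e:94:de:48, fixed IP 192.168.128.8). A short sketch of reading that structure; the field names (id, address, network.subnets[].ips[].address) are taken from the entries above, while the helper itself is purely illustrative and not a Nova or Neutron API:

    def summarize_network_info(network_info):
        # Collect port id, MAC, and fixed IPs from a network_info-style list of VIFs.
        summary = []
        for vif in network_info:
            ips = [
                ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
            ]
            summary.append({"port_id": vif["id"], "mac": vif["address"], "fixed_ips": ips})
        return summary

    example = [{"id": "6c2568f4-2500-47df-982b-bf146c8d29d5",
                "address": "fa:16:3e:94:de:48",
                "network": {"subnets": [{"ips": [{"address": "192.168.128.8"}]}]}}]
    print(summarize_network_info(example))
    # [{'port_id': '6c2568f4-...', 'mac': 'fa:16:3e:94:de:48', 'fixed_ips': ['192.168.128.8']}]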
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.220040] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.220372] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] adc7f255-be88-4eda-be25-f9ecc9d9bf99/adc7f255-be88-4eda-be25-f9ecc9d9bf99.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 965.220607] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77bd07cd-4e01-422e-bbf0-3a252711e4e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.228834] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 965.228834] env[68569]: value = "task-3167391" [ 965.228834] env[68569]: _type = "Task" [ 965.228834] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.232667] env[68569]: DEBUG nova.network.neutron [-] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.239477] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.325086] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a3b197aa-5e82-4c43-b482-444ca78bf44e tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "e9994248-0240-412b-9e60-a04b00e5c0cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.732s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.378581] env[68569]: DEBUG nova.compute.manager [req-6c6763bf-996e-40f3-91c0-7f8e792d7f45 req-237b71d9-d425-4f8e-8b28-bce1e26c1da9 service nova] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Received event network-vif-deleted-7eb93f86-e36c-4fa0-838c-ff5929a60333 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 965.428563] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167388, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.536732} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.429312] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7b95aece-35db-4eab-b221-c5eccd749eae/7b95aece-35db-4eab-b221-c5eccd749eae.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.429312] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.429525] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84ed409d-7bd8-410f-9f6e-d69b65d51e3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.441023] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 965.441023] env[68569]: value = "task-3167392" [ 965.441023] env[68569]: _type = "Task" [ 965.441023] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.451868] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167392, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.560272] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.560518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.560763] env[68569]: INFO nova.compute.manager [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Rebooting instance [ 965.646641] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0914b618-8cac-496d-974f-9d95d92b341e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.658071] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b288dc99-ad7b-42a9-abaf-48043062ce2f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.690492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "refresh_cache-0dc5da15-4c10-4754-ac82-a130b933295d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.690876] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance network_info: |[{"id": "6c2568f4-2500-47df-982b-bf146c8d29d5", "address": "fa:16:3e:94:de:48", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2568f4-25", "ovs_interfaceid": "6c2568f4-2500-47df-982b-bf146c8d29d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 965.691568] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:de:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c2568f4-2500-47df-982b-bf146c8d29d5', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.699370] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.700136] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ea5c2e-0da9-4dd9-957f-1fe171bc275e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.702815] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.703046] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-902708e4-89a2-44e8-9ea0-8ff270072bcf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.727989] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b97db8-1d4b-4b93-807a-51e52c5e469a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.730681] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.730681] env[68569]: value = "task-3167393" [ 965.730681] env[68569]: _type = "Task" [ 965.730681] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.744438] env[68569]: INFO nova.compute.manager [-] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Took 1.46 seconds to deallocate network for instance. [ 965.744944] env[68569]: DEBUG nova.compute.provider_tree [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.752399] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489085} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.759973] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] adc7f255-be88-4eda-be25-f9ecc9d9bf99/adc7f255-be88-4eda-be25-f9ecc9d9bf99.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.760308] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.760556] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167393, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.761573] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b819e538-218b-4192-8188-5d70c845edba {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.771636] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 965.771636] env[68569]: value = "task-3167394" [ 965.771636] env[68569]: _type = "Task" [ 965.771636] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.791532] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167394, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.876665] env[68569]: DEBUG nova.compute.manager [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Received event network-changed-6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 965.876858] env[68569]: DEBUG nova.compute.manager [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Refreshing instance network info cache due to event network-changed-6c2568f4-2500-47df-982b-bf146c8d29d5. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 965.877101] env[68569]: DEBUG oslo_concurrency.lockutils [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] Acquiring lock "refresh_cache-0dc5da15-4c10-4754-ac82-a130b933295d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.877316] env[68569]: DEBUG oslo_concurrency.lockutils [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] Acquired lock "refresh_cache-0dc5da15-4c10-4754-ac82-a130b933295d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.877517] env[68569]: DEBUG nova.network.neutron [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Refreshing network info cache for port 6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 965.952065] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167392, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164731} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.952065] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.952715] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4371bdb7-33b5-4ec9-b591-7b2e8bce8408 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.976979] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 7b95aece-35db-4eab-b221-c5eccd749eae/7b95aece-35db-4eab-b221-c5eccd749eae.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.977319] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b505b892-13a4-4576-80c8-6a453b581c04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.998833] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 965.998833] env[68569]: value = "task-3167395" [ 965.998833] env[68569]: _type = "Task" [ 965.998833] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.008244] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.079026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.079224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.079403] env[68569]: DEBUG nova.network.neutron [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.241848] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167393, 'name': CreateVM_Task, 'duration_secs': 0.393822} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.242068] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.242892] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.243164] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.244026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 966.244026] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea45c113-21f3-4c1e-abd2-4e82513ccc6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
966.250188] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 966.250188] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529b9260-b64c-b94c-4431-da0f2777af41" [ 966.250188] env[68569]: _type = "Task" [ 966.250188] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.258632] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529b9260-b64c-b94c-4431-da0f2777af41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.260639] env[68569]: DEBUG nova.scheduler.client.report [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.264298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.284821] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07919} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.285173] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.285995] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac38be9-78fa-4a97-a64f-d15a1d1f3ae8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.309988] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] adc7f255-be88-4eda-be25-f9ecc9d9bf99/adc7f255-be88-4eda-be25-f9ecc9d9bf99.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.310680] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bbf6f2a-7300-47ce-a62d-655a5b11cd7b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.332283] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 966.332283] env[68569]: value = "task-3167396" [ 966.332283] env[68569]: _type = "Task" [ 966.332283] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.342131] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.458233] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ccf535-474a-92c6-6762-d603e7915397/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 966.459129] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6134f6e9-adfe-4865-b569-b88b6c22528f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.466253] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ccf535-474a-92c6-6762-d603e7915397/disk-0.vmdk is in state: ready. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 966.466439] env[68569]: ERROR oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ccf535-474a-92c6-6762-d603e7915397/disk-0.vmdk due to incomplete transfer. [ 966.466709] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2d3edb67-62fd-47a6-bdc8-1f74e1a4c932 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.479016] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ccf535-474a-92c6-6762-d603e7915397/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 966.479268] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Uploaded image d761f48a-3fee-45c6-aed3-ecc0a2e681e2 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 966.481719] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 966.482019] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8899052a-4408-482c-a3f1-07b468944ec9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.490189] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 966.490189] env[68569]: value = "task-3167397" [ 966.490189] env[68569]: _type = "Task" [ 966.490189] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.498348] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167397, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.509116] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167395, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.645064] env[68569]: DEBUG nova.network.neutron [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Updated VIF entry in instance network info cache for port 6c2568f4-2500-47df-982b-bf146c8d29d5. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 966.645592] env[68569]: DEBUG nova.network.neutron [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Updating instance_info_cache with network_info: [{"id": "6c2568f4-2500-47df-982b-bf146c8d29d5", "address": "fa:16:3e:94:de:48", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c2568f4-25", "ovs_interfaceid": "6c2568f4-2500-47df-982b-bf146c8d29d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.760544] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529b9260-b64c-b94c-4431-da0f2777af41, 'name': SearchDatastore_Task, 'duration_secs': 0.011055} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.760844] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.761093] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.761323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.761528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.761722] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.761973] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2de50ce-f7ca-4259-8da3-f07f9a3505ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.765528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.767698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.320s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.768217] env[68569]: DEBUG nova.objects.instance [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lazy-loading 'resources' on Instance uuid a591b671-ca84-47b5-9831-63478d55fd07 {{(pid=68569) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.773310] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.773503] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.774687] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19a9c02a-d18d-467e-940d-1e9c24723d4c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.783791] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 966.783791] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524518c2-1085-65f4-9666-69f377499ac3" [ 966.783791] env[68569]: _type = "Task" [ 966.783791] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.793740] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524518c2-1085-65f4-9666-69f377499ac3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.795580] env[68569]: INFO nova.scheduler.client.report [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted allocations for instance b770fbd1-579a-4e3e-a5c9-9f030695f057 [ 966.809454] env[68569]: DEBUG nova.network.neutron [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.843398] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167396, 'name': ReconfigVM_Task, 'duration_secs': 0.31696} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.843666] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Reconfigured VM instance instance-00000052 to attach disk [datastore1] adc7f255-be88-4eda-be25-f9ecc9d9bf99/adc7f255-be88-4eda-be25-f9ecc9d9bf99.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.844304] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1df7c076-4b8d-4419-9a4b-097473655c6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.853514] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 966.853514] env[68569]: value = "task-3167398" [ 966.853514] env[68569]: _type = "Task" [ 966.853514] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.862590] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167398, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.000602] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167397, 'name': Destroy_Task, 'duration_secs': 0.502887} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.003647] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Destroyed the VM [ 967.003888] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 967.004157] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a40ad7a2-75ac-4aeb-bb21-04366dab90c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.012927] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167395, 'name': ReconfigVM_Task, 'duration_secs': 0.987326} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.013615] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 7b95aece-35db-4eab-b221-c5eccd749eae/7b95aece-35db-4eab-b221-c5eccd749eae.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.014567] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 967.014567] env[68569]: value = "task-3167399" [ 967.014567] env[68569]: _type = "Task" [ 967.014567] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.014847] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00da5d83-cd68-4e97-a2d5-5063d4cbd748 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.026621] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167399, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.028085] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 967.028085] env[68569]: value = "task-3167400" [ 967.028085] env[68569]: _type = "Task" [ 967.028085] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.037870] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167400, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.148819] env[68569]: DEBUG oslo_concurrency.lockutils [req-10cb2b91-67a3-44cb-90e3-c7dd7d9640a0 req-aea3e165-baac-4dee-80a7-70d5cc9405e2 service nova] Releasing lock "refresh_cache-0dc5da15-4c10-4754-ac82-a130b933295d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.295352] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524518c2-1085-65f4-9666-69f377499ac3, 'name': SearchDatastore_Task, 'duration_secs': 0.013464} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.296145] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c9f799e-c86c-4120-8e5a-46f374e61ac6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.303220] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 967.303220] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e05547-4883-3be6-d488-57c0afb2165a" [ 967.303220] env[68569]: _type = "Task" [ 967.303220] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.305925] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5a709859-7538-47f6-aaee-9ac92b23d5ca tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b770fbd1-579a-4e3e-a5c9-9f030695f057" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.173s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.312194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.317996] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e05547-4883-3be6-d488-57c0afb2165a, 'name': SearchDatastore_Task, 'duration_secs': 0.012} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.318425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.318765] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.319060] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af00299b-e24c-4d83-88be-8896053022c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.332014] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 967.332014] env[68569]: value = "task-3167401" [ 967.332014] env[68569]: _type = "Task" [ 967.332014] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.347047] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.365199] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167398, 'name': Rename_Task, 'duration_secs': 0.158454} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.367920] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.368718] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdfef3a8-de5c-4239-851e-ae0e4c4bbcdf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.378062] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 967.378062] env[68569]: value = "task-3167402" [ 967.378062] env[68569]: _type = "Task" [ 967.378062] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.391258] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.533114] env[68569]: DEBUG oslo_vmware.api [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167399, 'name': RemoveSnapshot_Task, 'duration_secs': 0.470687} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.542040] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 967.542366] env[68569]: INFO nova.compute.manager [None req-e697167c-f43d-44d7-a072-6364d0c6965c tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Took 14.98 seconds to snapshot the instance on the hypervisor. [ 967.554512] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167400, 'name': Rename_Task, 'duration_secs': 0.150198} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.555493] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.556518] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3f14efb-e203-46cf-a771-ebb05718d5d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.572716] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 967.572716] env[68569]: value = "task-3167403" [ 967.572716] env[68569]: _type = "Task" [ 967.572716] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.593845] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167403, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.597862] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06effd3f-1f5e-4707-bc5d-0731bd290d66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.611286] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ba1bbe-131a-4e3f-96a6-ab3f5ccc5e28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.671101] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2681d5e-47d9-4b02-8f48-45c9d4f9ca77 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.683295] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1e2ff2-40d4-4378-ba64-aa6068956361 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.708973] env[68569]: DEBUG nova.compute.provider_tree [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.821896] env[68569]: DEBUG nova.compute.manager [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.823207] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c803100-468c-4331-9a53-534c54b7a8cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.839013] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "b40c9dec-cebc-4d23-8df4-96e804333706" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.839282] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b40c9dec-cebc-4d23-8df4-96e804333706" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.839489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "b40c9dec-cebc-4d23-8df4-96e804333706-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
967.839669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b40c9dec-cebc-4d23-8df4-96e804333706-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.839836] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b40c9dec-cebc-4d23-8df4-96e804333706-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.842015] env[68569]: INFO nova.compute.manager [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Terminating instance [ 967.848256] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167401, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.891803] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167402, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.083821] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167403, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.212450] env[68569]: DEBUG nova.scheduler.client.report [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.346625] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585208} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.346976] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.347229] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.347503] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b701a79-8df0-46c3-b7c1-94fee340b51c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.350076] env[68569]: DEBUG nova.compute.manager [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.350340] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.351032] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6aacf1-b449-463b-99e4-8722af1da6c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.360390] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.361715] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01111ba2-570c-48e8-95fe-9fe3943baba3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.363493] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 968.363493] env[68569]: value = "task-3167404" [ 968.363493] env[68569]: _type = "Task" [ 968.363493] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.369996] env[68569]: DEBUG oslo_vmware.api [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 968.369996] env[68569]: value = "task-3167405" [ 968.369996] env[68569]: _type = "Task" [ 968.369996] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.376719] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.380669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.380991] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.381294] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.381484] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.381914] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.387240] env[68569]: DEBUG oslo_vmware.api [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167405, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.387762] env[68569]: INFO nova.compute.manager [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Terminating instance [ 968.401209] env[68569]: DEBUG oslo_vmware.api [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167402, 'name': PowerOnVM_Task, 'duration_secs': 0.99701} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.401997] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.402201] env[68569]: INFO nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Took 7.34 seconds to spawn the instance on the hypervisor. [ 968.402448] env[68569]: DEBUG nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.403223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b911db-8e5e-4ed8-8ca8-a40d7949f686 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.584465] env[68569]: DEBUG oslo_vmware.api [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167403, 'name': PowerOnVM_Task, 'duration_secs': 0.659974} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.584465] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.584465] env[68569]: INFO nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Took 10.01 seconds to spawn the instance on the hypervisor. 
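
The entries above all follow one pattern: an operation against vCenter (ReconfigVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ...) returns a task reference ('_type = "Task"'), which oslo_vmware's wait_for_task then polls, logging "progress is N%" until the task "completed successfully". The sketch below only illustrates that poll-until-done loop; it is not the oslo_vmware implementation, and get_state/get_progress are caller-supplied stand-ins for the vSphere property reads that oslo_vmware actually performs.

import itertools
import time

def wait_for_task(get_state, get_progress, poll_interval=0.1, timeout=300.0):
    """Poll a vCenter-style task until it finishes, mirroring the
    'progress is N%' / 'completed successfully' lines in this log.
    get_state/get_progress are hypothetical callables, not real
    oslo_vmware or pyVmomi APIs."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_state()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        print("progress is %d%%" % get_progress())
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %ss" % timeout)

if __name__ == "__main__":
    # Simulated task that reports increasing progress and then succeeds.
    steps = itertools.chain([0, 5, 50, 90], itertools.repeat(100))
    progress = {"value": 0}

    def get_progress():
        progress["value"] = next(steps)
        return progress["value"]

    def get_state():
        return "success" if progress["value"] >= 100 else "running"

    wait_for_task(get_state, get_progress)
    print("completed successfully")
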
[ 968.584465] env[68569]: DEBUG nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.585890] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f49edc-a44b-4d60-a2eb-f4373912ffd3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.717561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.720673] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.041s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.845146] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec805af-d22f-4724-be36-d6349df0f78b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.853483] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Doing hard reboot of VM {{(pid=68569) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 968.853483] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-33a99bb0-c2a9-4066-b483-287387276616 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.862795] env[68569]: DEBUG oslo_vmware.api [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 968.862795] env[68569]: value = "task-3167406" [ 968.862795] env[68569]: _type = "Task" [ 968.862795] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.875983] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "ae92919c-f2eb-4231-afea-b23269e09a0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.876148] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.885429] env[68569]: DEBUG oslo_vmware.api [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167406, 'name': ResetVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.890564] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103478} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.893645] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.893983] env[68569]: DEBUG oslo_vmware.api [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167405, 'name': PowerOffVM_Task, 'duration_secs': 0.295291} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.895061] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a434d-9417-42be-919a-0c413229eac7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.902024] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.902024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.902024] env[68569]: DEBUG nova.compute.manager [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.902024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.902024] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdae18a3-2c2a-412e-b6e3-a213e6347cd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.902798] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7fa0a9-4059-4837-a9ee-013aeb15f2d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.935700] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.936681] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.938384] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ad62870-6eaa-4e75-b492-8f094bc3048e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.953743] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-fc318e70-b2e0-4b1b-bca1-69d2025dfbd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.956610] env[68569]: INFO nova.compute.manager [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Took 16.73 seconds to build instance. [ 968.969917] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 968.969917] env[68569]: value = "task-3167409" [ 968.969917] env[68569]: _type = "Task" [ 968.969917] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.979082] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.002143] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.002487] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.002690] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleting the datastore file [datastore2] b40c9dec-cebc-4d23-8df4-96e804333706 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.002959] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05b0f634-0124-45ed-88ac-1a7d1e79e352 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.009570] env[68569]: DEBUG oslo_vmware.api [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 969.009570] env[68569]: value = "task-3167410" [ 969.009570] env[68569]: _type = "Task" [ 969.009570] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.019237] env[68569]: DEBUG oslo_vmware.api [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167410, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.043165] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.043552] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.043840] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleting the datastore file [datastore2] 6b2120d3-2e4b-4d1b-8109-6513b3b320eb {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.044224] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35e78409-b85d-4fc8-985c-f07dbfce1eef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.052308] env[68569]: DEBUG oslo_vmware.api [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 969.052308] env[68569]: value = "task-3167411" [ 969.052308] env[68569]: _type = "Task" [ 969.052308] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.061381] env[68569]: DEBUG oslo_vmware.api [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.106137] env[68569]: INFO nova.compute.manager [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Took 20.63 seconds to build instance. 
[ 969.226751] env[68569]: INFO nova.compute.claims [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.237227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-49434954-e8a1-49d4-ac2a-68c24d1fa85d tempest-ServerActionsV293TestJSON-1911180199 tempest-ServerActionsV293TestJSON-1911180199-project-member] Lock "a591b671-ca84-47b5-9831-63478d55fd07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.709s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.373804] env[68569]: DEBUG oslo_vmware.api [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167406, 'name': ResetVM_Task, 'duration_secs': 0.125905} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.374111] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Did hard reboot of VM {{(pid=68569) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 969.374304] env[68569]: DEBUG nova.compute.manager [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 969.375093] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d628caa0-617e-402d-9936-a01e8b9f0ee3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.384910] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.462056] env[68569]: DEBUG oslo_concurrency.lockutils [None req-01e84737-de6a-42e0-9f70-1858c06ec96e tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.249s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.462460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.462685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.462864] env[68569]: DEBUG nova.compute.manager [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 969.463806] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d4798f-89b8-45f9-b626-442b34d1d7db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.472106] env[68569]: DEBUG nova.compute.manager [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 969.472699] env[68569]: DEBUG nova.objects.instance [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'flavor' on Instance uuid adc7f255-be88-4eda-be25-f9ecc9d9bf99 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.483895] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167409, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.521260] env[68569]: DEBUG oslo_vmware.api [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274061} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.521260] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.521417] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.521514] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.521656] env[68569]: INFO nova.compute.manager [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Took 1.17 seconds to destroy the instance on the hypervisor. [ 969.521890] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.522446] env[68569]: DEBUG nova.compute.manager [-] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.522542] env[68569]: DEBUG nova.network.neutron [-] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.569772] env[68569]: DEBUG oslo_vmware.api [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256812} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.570123] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.570258] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.570433] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.570605] env[68569]: INFO nova.compute.manager [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Took 0.67 seconds to destroy the instance on the hypervisor. [ 969.570844] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.571041] env[68569]: DEBUG nova.compute.manager [-] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.571141] env[68569]: DEBUG nova.network.neutron [-] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.605507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1efb371e-fa48-4145-86c4-b1df9b4b180d tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7b95aece-35db-4eab-b221-c5eccd749eae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.142s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.733394] env[68569]: INFO nova.compute.resource_tracker [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Updating resource usage from migration 3c07fed6-2f98-4b3f-9a0a-921364d9b5f8 [ 969.895136] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a38e16a9-9a60-47d4-8d27-27e71ff58453 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.334s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.911840] 
env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.975609] env[68569]: DEBUG nova.compute.manager [req-8c30b51d-e769-4d9d-9cdb-2cbf59e8c554 req-12c48bb4-939d-4efa-97fe-e386545594aa service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Received event network-vif-deleted-5f4944ec-908c-452e-9cdf-7b42d3277124 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 969.977031] env[68569]: INFO nova.compute.manager [req-8c30b51d-e769-4d9d-9cdb-2cbf59e8c554 req-12c48bb4-939d-4efa-97fe-e386545594aa service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Neutron deleted interface 5f4944ec-908c-452e-9cdf-7b42d3277124; detaching it from the instance and deleting it from the info cache [ 969.977031] env[68569]: DEBUG nova.network.neutron [req-8c30b51d-e769-4d9d-9cdb-2cbf59e8c554 req-12c48bb4-939d-4efa-97fe-e386545594aa service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.993788] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167409, 'name': ReconfigVM_Task, 'duration_secs': 0.604636} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.994961] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.995583] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-756f7641-7889-4044-b458-d24e7b6fc4df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.006967] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 970.006967] env[68569]: value = "task-3167412" [ 970.006967] env[68569]: _type = "Task" [ 970.006967] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.019934] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167412, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.071414] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee055646-16ff-4701-9e6b-7984adc43ef2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.080591] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272f7d29-2a0a-4083-8de1-f4fb4c6a3e87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.115313] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa75698-8c2b-414d-9096-cc409e79d954 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.131416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf29a10-218d-4105-9978-0ecffa22b19f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.146629] env[68569]: DEBUG nova.compute.provider_tree [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.209504] env[68569]: DEBUG nova.compute.manager [req-03ae0f94-7696-4787-82ed-e64c2b81fa84 req-774a310f-9506-4f20-9837-0e2a350a00db service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Received event network-vif-deleted-292385c9-acfb-49af-88d2-6fb5794c6268 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 970.209707] env[68569]: INFO nova.compute.manager [req-03ae0f94-7696-4787-82ed-e64c2b81fa84 req-774a310f-9506-4f20-9837-0e2a350a00db service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Neutron deleted interface 292385c9-acfb-49af-88d2-6fb5794c6268; detaching it from the instance and deleting it from the info cache [ 970.209883] env[68569]: DEBUG nova.network.neutron [req-03ae0f94-7696-4787-82ed-e64c2b81fa84 req-774a310f-9506-4f20-9837-0e2a350a00db service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.347584] env[68569]: DEBUG nova.network.neutron [-] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.482251] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-812fb624-bdf8-4fae-9680-2816981b8af9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.485880] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.486295] env[68569]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17db47f5-1c6b-4761-a32a-3813a3155f49 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.499795] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27190560-2e7d-4362-bca2-e8839a9bc7e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.518116] env[68569]: DEBUG oslo_vmware.api [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 970.518116] env[68569]: value = "task-3167413" [ 970.518116] env[68569]: _type = "Task" [ 970.518116] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.534821] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167412, 'name': Rename_Task, 'duration_secs': 0.478401} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.539479] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.539884] env[68569]: DEBUG oslo_vmware.api [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167413, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.558648] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe489b5a-dfd2-47a8-bd56-6b06de38a420 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.561185] env[68569]: DEBUG nova.compute.manager [req-8c30b51d-e769-4d9d-9cdb-2cbf59e8c554 req-12c48bb4-939d-4efa-97fe-e386545594aa service nova] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Detach interface failed, port_id=5f4944ec-908c-452e-9cdf-7b42d3277124, reason: Instance b40c9dec-cebc-4d23-8df4-96e804333706 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 970.568392] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 970.568392] env[68569]: value = "task-3167414" [ 970.568392] env[68569]: _type = "Task" [ 970.568392] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.581287] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167414, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.624403] env[68569]: DEBUG nova.network.neutron [-] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.625795] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "7b95aece-35db-4eab-b221-c5eccd749eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.626016] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7b95aece-35db-4eab-b221-c5eccd749eae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.626215] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "7b95aece-35db-4eab-b221-c5eccd749eae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.626475] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7b95aece-35db-4eab-b221-c5eccd749eae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.626661] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7b95aece-35db-4eab-b221-c5eccd749eae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.628560] env[68569]: INFO nova.compute.manager [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Terminating instance [ 970.650087] env[68569]: DEBUG nova.scheduler.client.report [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.713083] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-769fd392-eb27-414e-a0ed-18224a51654c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.724879] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73af500e-ea98-4a64-bc30-543158a07493 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.766775] env[68569]: DEBUG nova.compute.manager [req-03ae0f94-7696-4787-82ed-e64c2b81fa84 req-774a310f-9506-4f20-9837-0e2a350a00db service nova] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Detach interface failed, port_id=292385c9-acfb-49af-88d2-6fb5794c6268, reason: Instance 6b2120d3-2e4b-4d1b-8109-6513b3b320eb could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 970.850477] env[68569]: INFO nova.compute.manager [-] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Took 1.33 seconds to deallocate network for instance. [ 971.030355] env[68569]: DEBUG oslo_vmware.api [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167413, 'name': PowerOffVM_Task, 'duration_secs': 0.25377} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.030662] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.030863] env[68569]: DEBUG nova.compute.manager [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.031874] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b633aa-cb8d-471c-b066-ed77470a6155 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.080074] env[68569]: DEBUG oslo_vmware.api [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167414, 'name': PowerOnVM_Task, 'duration_secs': 0.47701} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.080074] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.080616] env[68569]: INFO nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Took 7.64 seconds to spawn the instance on the hypervisor. [ 971.080616] env[68569]: DEBUG nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.081389] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06e83ee-a409-405a-b74b-d01ca385c34b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.128616] env[68569]: INFO nova.compute.manager [-] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Took 1.56 seconds to deallocate network for instance. [ 971.133833] env[68569]: DEBUG nova.compute.manager [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 971.134025] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 971.135153] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923de94e-e570-4695-b3be-32108cb37509 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.144278] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.144511] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-859e1b71-a7b0-4a1f-8af5-ab5324247bf5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.151862] env[68569]: DEBUG oslo_vmware.api [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 971.151862] env[68569]: value = "task-3167415" [ 971.151862] env[68569]: _type = "Task" [ 971.151862] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.156066] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.435s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.156233] env[68569]: INFO nova.compute.manager [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Migrating [ 971.164455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.251s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.164687] env[68569]: DEBUG nova.objects.instance [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lazy-loading 'resources' on Instance uuid 123a6895-af16-493a-afce-7ae6c2137422 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.181302] env[68569]: DEBUG oslo_vmware.api [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.358932] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.546369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-abb3d58b-35c9-447f-8487-4a4397cc1cd7 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.084s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.600400] env[68569]: INFO nova.compute.manager [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Took 19.19 seconds to build instance. 
[ 971.634909] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.663108] env[68569]: DEBUG oslo_vmware.api [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167415, 'name': PowerOffVM_Task, 'duration_secs': 0.228936} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.663238] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.663410] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 971.663630] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df48b640-17cd-4f68-b874-cce4aff8c97a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.681317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.683596] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.683596] env[68569]: DEBUG nova.network.neutron [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.728498] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 971.728740] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 971.728938] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore1] 7b95aece-35db-4eab-b221-c5eccd749eae {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 971.729281] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f30d2a30-9bcb-4e5b-9cb6-703d212fef8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.740098] env[68569]: DEBUG oslo_vmware.api [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 971.740098] env[68569]: value = "task-3167417" [ 971.740098] env[68569]: _type = "Task" [ 971.740098] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.750174] env[68569]: DEBUG oslo_vmware.api [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.952579] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a352a6-9f96-4650-b0c9-df257ba04321 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.961189] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c47389f-5ace-459a-b874-8c745c7dcf50 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.992347] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303daad7-59db-4690-8687-767fb785bcf0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.000755] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfd5e48-b797-447d-aa4b-ef745c1244a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.015272] env[68569]: DEBUG nova.compute.provider_tree [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.040383] env[68569]: DEBUG oslo_concurrency.lockutils [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "0dc5da15-4c10-4754-ac82-a130b933295d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.103158] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0d0758c7-b7e8-4da6-b79b-fdb47c8bce44 tempest-ServerActionsTestOtherA-130731114 
tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.701s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.103466] env[68569]: DEBUG oslo_concurrency.lockutils [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.063s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.103664] env[68569]: DEBUG nova.compute.manager [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.104559] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b03d54-51c0-4101-9a2e-48d2c34964bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.112315] env[68569]: DEBUG nova.compute.manager [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 972.112882] env[68569]: DEBUG nova.objects.instance [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'flavor' on Instance uuid 0dc5da15-4c10-4754-ac82-a130b933295d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.236895] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.237967] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.238376] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.238700] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.239019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.248023] env[68569]: INFO nova.compute.manager [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Terminating instance [ 972.259654] env[68569]: DEBUG oslo_vmware.api [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143619} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.260501] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 972.260811] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 972.261109] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 972.261388] env[68569]: INFO nova.compute.manager [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Took 1.13 seconds to destroy the instance on the hypervisor. [ 972.261846] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 972.262146] env[68569]: DEBUG nova.compute.manager [-] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 972.262329] env[68569]: DEBUG nova.network.neutron [-] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 972.425598] env[68569]: DEBUG nova.network.neutron [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Updating instance_info_cache with network_info: [{"id": "2455c808-85b7-415e-9905-53066039a3f3", "address": "fa:16:3e:45:d7:46", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2455c808-85", "ovs_interfaceid": "2455c808-85b7-415e-9905-53066039a3f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.519121] env[68569]: DEBUG nova.scheduler.client.report [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.598950] env[68569]: DEBUG nova.compute.manager [req-40bcf80f-eeb9-41c5-8395-011e5950e02d req-0b015875-abf4-4107-898b-448167b8298e service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Received event network-vif-deleted-89cde5fd-0a91-4be2-8dba-e551f8bcb57e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 972.598950] env[68569]: INFO nova.compute.manager [req-40bcf80f-eeb9-41c5-8395-011e5950e02d req-0b015875-abf4-4107-898b-448167b8298e service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Neutron deleted interface 89cde5fd-0a91-4be2-8dba-e551f8bcb57e; detaching it from the instance and deleting it 
from the info cache [ 972.598950] env[68569]: DEBUG nova.network.neutron [req-40bcf80f-eeb9-41c5-8395-011e5950e02d req-0b015875-abf4-4107-898b-448167b8298e service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.762066] env[68569]: DEBUG nova.compute.manager [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 972.762369] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.763526] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62283ffe-6461-45e2-a02c-f597cadfeec9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.773060] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.773060] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05cead17-4e6c-446d-9e87-5fc8739870ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.861431] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.861644] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.861825] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleting the datastore file [datastore1] adc7f255-be88-4eda-be25-f9ecc9d9bf99 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.863169] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91f5455e-bd73-4235-8479-64efc7e9a1c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.870311] env[68569]: DEBUG oslo_vmware.api [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] 
Waiting for the task: (returnval){ [ 972.870311] env[68569]: value = "task-3167419" [ 972.870311] env[68569]: _type = "Task" [ 972.870311] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.881123] env[68569]: DEBUG oslo_vmware.api [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.930032] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.026051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.028170] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.147s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.030520] env[68569]: INFO nova.compute.claims [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.047638] env[68569]: INFO nova.scheduler.client.report [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Deleted allocations for instance 123a6895-af16-493a-afce-7ae6c2137422 [ 973.073838] env[68569]: DEBUG nova.network.neutron [-] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.100889] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24d596ce-1c3e-4977-b549-32d29e6fad53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.111970] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea724e2f-4190-4b9b-a348-a9f2c0cd9127 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.125115] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powering off the VM {{(pid=68569) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.125115] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d0c3d7e-2f95-40bf-8c54-093ad5a9b876 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.133090] env[68569]: DEBUG oslo_vmware.api [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 973.133090] env[68569]: value = "task-3167420" [ 973.133090] env[68569]: _type = "Task" [ 973.133090] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.140993] env[68569]: DEBUG oslo_vmware.api [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167420, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.152701] env[68569]: DEBUG nova.compute.manager [req-40bcf80f-eeb9-41c5-8395-011e5950e02d req-0b015875-abf4-4107-898b-448167b8298e service nova] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Detach interface failed, port_id=89cde5fd-0a91-4be2-8dba-e551f8bcb57e, reason: Instance 7b95aece-35db-4eab-b221-c5eccd749eae could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 973.380902] env[68569]: DEBUG oslo_vmware.api [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134844} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.381187] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.381316] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.381481] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.381655] env[68569]: INFO nova.compute.manager [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Took 0.62 seconds to destroy the instance on the hypervisor. 
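The entries just above show the vCenter task life-cycle that repeats throughout this log: an oslo.vmware call such as FileManager.DeleteDatastoreFile_Task or VirtualMachine.PowerOffVM_Task returns a task handle (e.g. task-3167419), the caller logs "Waiting for the task", and wait_for_task/_poll_task report "progress is N%" until the task is "completed successfully" with a duration_secs value. The sketch below illustrates only that polling pattern; FakeTask, its tick counter, and the poll interval are illustrative stand-ins and not the oslo.vmware or Nova implementation.

```python
import time

# Illustrative stand-in for a vCenter task handle; the real objects come
# from oslo.vmware / the vSphere API, not from this sketch.
class FakeTask:
    def __init__(self, name, ticks_to_finish=3):
        self.name = name
        self._ticks = ticks_to_finish
        self.progress = 0

    def poll(self):
        """Advance the fake task one step and return its current state."""
        self._ticks -= 1
        self.progress = min(100, self.progress + 40)
        return "success" if self._ticks <= 0 else "running"


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, mirroring the log's
    'progress is N%' / 'completed successfully' messages."""
    start = time.monotonic()
    print(f"Waiting for the task: {task.name}")
    while True:
        state = task.poll()
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return
        print(f"Task {task.name} progress is {task.progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("DeleteDatastoreFile_Task"))
```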
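The recurring 'Acquiring lock', 'acquired ... waited N.NNNs', and '"released" ... held N.NNNs' messages in this section (for named locks such as "compute_resources" and "refresh_cache-f8b56e81-f3ef-489b-a64c-be687cf99fd1") record how long each critical section waited for and then held a lock. The following sketch reproduces only that wait/hold bookkeeping with the Python standard library; it is a simplified illustration, not the oslo.concurrency lockutils code Nova actually uses.

```python
import threading
import time
from contextlib import contextmanager

_locks = {}                       # lock name -> threading.Lock, created on demand
_locks_guard = threading.Lock()   # protects the _locks registry itself


@contextmanager
def logged_lock(name, caller):
    """Acquire a named lock and log how long we waited for it and held it,
    mimicking the 'waited N.NNNs' / 'held N.NNNs' lines in the log."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" '
              f':: held {time.monotonic() - t1:.3f}s')


if __name__ == "__main__":
    with logged_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)   # stand-in for the work done while the lock is held
```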
[ 973.381894] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.382111] env[68569]: DEBUG nova.compute.manager [-] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.382207] env[68569]: DEBUG nova.network.neutron [-] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.555618] env[68569]: DEBUG oslo_concurrency.lockutils [None req-56af6753-1b1f-4d55-95d1-27f6998afed3 tempest-ServerRescueTestJSON-1383878555 tempest-ServerRescueTestJSON-1383878555-project-member] Lock "123a6895-af16-493a-afce-7ae6c2137422" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.781s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.576512] env[68569]: INFO nova.compute.manager [-] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Took 1.31 seconds to deallocate network for instance. [ 973.644938] env[68569]: DEBUG oslo_vmware.api [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167420, 'name': PowerOffVM_Task, 'duration_secs': 0.191925} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.648017] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.648017] env[68569]: DEBUG nova.compute.manager [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.648017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8672a13-7015-4fde-83b7-fa50b46d1922 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.086648] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.138369] env[68569]: DEBUG nova.network.neutron [-] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.168020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-513b4eeb-1805-4c93-9a85-af234befc557 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.061s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.374474] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ba10aa-e47d-4840-9932-17685862ab4d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.383300] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59d6563-09b3-4553-a9f2-3c97e6f15a8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.416421] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d67f8a-c53c-4538-8e25-0d9878a00431 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.426594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a4c48a-8b6b-4759-9714-a6464f2e7081 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.444521] env[68569]: DEBUG nova.compute.provider_tree [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed in ProviderTree for provider: 
a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.583010] env[68569]: INFO nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Rebuilding instance [ 974.625297] env[68569]: DEBUG nova.compute.manager [req-fff92f56-bdfd-4d3c-8da6-2e2f45ff011a req-5c00271c-e4b1-438a-a6ad-f46e9c27388f service nova] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Received event network-vif-deleted-9490c5cc-846a-4eee-aa25-a8544a7619d0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 974.641062] env[68569]: INFO nova.compute.manager [-] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Took 1.26 seconds to deallocate network for instance. [ 974.641474] env[68569]: DEBUG nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.646226] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ad409c-bbe5-4519-8ec9-b5053bbc6f87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.949029] env[68569]: DEBUG nova.scheduler.client.report [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.158557] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.455467] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.458080] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 975.463428] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.259s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.464155] env[68569]: INFO nova.compute.claims [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 975.467166] env[68569]: ERROR nova.compute.manager [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Traceback (most recent call last): [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] yield [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] disk_info = self.driver.migrate_disk_and_power_off( [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 975.467166] env[68569]: ERROR nova.compute.manager [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] [ 975.670460] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.670774] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8bfd071-1701-4651-b16d-600ad6879fab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.680094] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 975.680094] env[68569]: value = "task-3167421" [ 975.680094] env[68569]: _type = "Task" [ 975.680094] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.688590] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.976161] env[68569]: DEBUG nova.compute.utils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 975.980373] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 975.980501] env[68569]: DEBUG nova.network.neutron [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 975.994356] env[68569]: INFO nova.compute.manager [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration 3c07fed6-2f98-4b3f-9a0a-921364d9b5f8 for instance [ 976.018305] env[68569]: DEBUG nova.scheduler.client.report [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 122}}, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'user_id': 'afebab35cda9438781e2b466ce586405', 'consumer_generation': 1} on consumer f8b56e81-f3ef-489b-a64c-be687cf99fd1 {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 976.021800] env[68569]: DEBUG nova.policy [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '575076a4e45f4a9fb3e804856d83094f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '713d88f021794769a64eef3807ade9be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 976.192520] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] VM already powered off 
{{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 976.193163] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.194047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d617d95-3496-4aa6-98e0-bc0074322fc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.202816] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 976.203197] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b67cd10d-4754-4c73-99cc-7cf53ec74677 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.276682] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 976.276682] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 976.276682] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore1] 0dc5da15-4c10-4754-ac82-a130b933295d {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.276682] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b882d093-a1c9-4d07-8c73-a3363e738a7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.284913] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 976.284913] env[68569]: value = "task-3167423" [ 976.284913] env[68569]: _type = "Task" [ 976.284913] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.292362] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167423, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.331581] env[68569]: DEBUG nova.network.neutron [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Successfully created port: feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.481062] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 976.776024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eddc7b4-3509-454d-9ae3-2db78571d8f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.784272] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b375d8-16e6-4c64-a4c9-899aa23830e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.796561] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156776} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.823971] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.825144] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.825144] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.831023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e925545b-dda8-4552-8b6d-9aee38cb2d57 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.843905] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018bd6b7-6627-4b6d-886a-890ad12afb1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.859250] env[68569]: DEBUG nova.compute.provider_tree [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f 
tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.339695] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.339960] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.340212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.340409] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.340577] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.342322] env[68569]: INFO nova.compute.manager [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Terminating instance [ 977.361928] env[68569]: DEBUG nova.scheduler.client.report [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.496052] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 977.520326] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.520626] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.520810] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.521017] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.521194] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.521368] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.521582] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.521752] 
env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.521960] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.522084] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.522256] env[68569]: DEBUG nova.virt.hardware [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.523159] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a267f51-a40d-4ed4-b491-9a4eb00e3511 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.532586] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20343af-9b0f-46bb-97fd-482610701da6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.580046] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.782955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "281d7077-391a-4cce-9d31-af41568a2b7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.783252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "281d7077-391a-4cce-9d31-af41568a2b7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.846989] env[68569]: DEBUG nova.compute.manager [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 977.850713] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.850713] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13eeeb03-38fb-4869-8441-84ecf129ebb6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.857569] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 977.860046] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78014f09-dd52-4454-b670-0659364d3cec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.867301] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.867557] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 977.876863] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.877087] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.877245] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.877896] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.877896] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.877896] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.877896] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.881746] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.882121] env[68569]: DEBUG nova.virt.hardware [None 
req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.882432] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.882742] env[68569]: DEBUG nova.virt.hardware [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.884854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.621s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.885224] env[68569]: DEBUG nova.objects.instance [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lazy-loading 'resources' on Instance uuid 060fc4c8-b173-4fc4-8232-e13e3eac9dc3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.887669] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034dc3fd-c9c9-430c-b49d-b434050b7332 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.892575] env[68569]: DEBUG oslo_vmware.api [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 977.892575] env[68569]: value = "task-3167424" [ 977.892575] env[68569]: _type = "Task" [ 977.892575] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.904228] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905cbe22-0166-4e0c-8eb6-f70fcabf9fb6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.918102] env[68569]: DEBUG nova.compute.manager [req-2442f65a-ce5e-49f4-bc53-45fa416cd242 req-535fe1b2-a1bb-43b3-a21a-6c0396ff064c service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Received event network-vif-plugged-feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 977.918452] env[68569]: DEBUG oslo_concurrency.lockutils [req-2442f65a-ce5e-49f4-bc53-45fa416cd242 req-535fe1b2-a1bb-43b3-a21a-6c0396ff064c service nova] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.919050] env[68569]: DEBUG oslo_concurrency.lockutils [req-2442f65a-ce5e-49f4-bc53-45fa416cd242 req-535fe1b2-a1bb-43b3-a21a-6c0396ff064c service nova] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.919156] env[68569]: DEBUG oslo_concurrency.lockutils [req-2442f65a-ce5e-49f4-bc53-45fa416cd242 req-535fe1b2-a1bb-43b3-a21a-6c0396ff064c service nova] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.919473] env[68569]: DEBUG nova.compute.manager [req-2442f65a-ce5e-49f4-bc53-45fa416cd242 req-535fe1b2-a1bb-43b3-a21a-6c0396ff064c service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] No waiting events found dispatching network-vif-plugged-feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 977.919701] env[68569]: WARNING nova.compute.manager [req-2442f65a-ce5e-49f4-bc53-45fa416cd242 req-535fe1b2-a1bb-43b3-a21a-6c0396ff064c service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Received unexpected event network-vif-plugged-feea2419-1c39-4028-811f-c72311dae7a4 for instance with vm_state building and task_state spawning. [ 977.920488] env[68569]: DEBUG oslo_vmware.api [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167424, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.937620] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:de:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c2568f4-2500-47df-982b-bf146c8d29d5', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.947935] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.949752] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.949752] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9223ee35-cf40-49ac-a679-b907cfcda172 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.973021] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.973021] env[68569]: value = "task-3167425" [ 977.973021] env[68569]: _type = "Task" [ 977.973021] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.978048] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167425, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.047550] env[68569]: DEBUG nova.network.neutron [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Successfully updated port: feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 978.258024] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.258281] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.258459] env[68569]: INFO nova.compute.manager [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Shelving [ 978.285802] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 978.376746] env[68569]: DEBUG nova.compute.utils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 978.378274] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 978.378484] env[68569]: DEBUG nova.network.neutron [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 978.407433] env[68569]: DEBUG oslo_vmware.api [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167424, 'name': PowerOffVM_Task, 'duration_secs': 0.189499} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.407734] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 978.407969] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 978.408256] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7ec7e19-ec35-4699-a4f5-ecf467cd9c98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.414368] env[68569]: DEBUG nova.policy [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '842e74e7139540d7a537eb8bd56bca78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e52d937c83d46daa36746494bd7ccbe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 978.479843] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167425, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.550303] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.550495] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.550624] env[68569]: DEBUG nova.network.neutron [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.652026] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a9ba89-0895-4d42-a09a-0a7cd53376b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.656283] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.656491] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.656687] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore2] f8b56e81-f3ef-489b-a64c-be687cf99fd1 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.658867] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-001f77c2-89f2-4bd5-ae63-39f29075442c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.661408] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4ce3d1-50ab-41f1-b99e-c505ead3fff0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.693965] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f195ffbe-461f-4fb5-83fd-2a8a885dc8eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.696862] env[68569]: DEBUG oslo_vmware.api [None req-a763b26c-71c9-4c1f-90a3-52d50040297e 
tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 978.696862] env[68569]: value = "task-3167427" [ 978.696862] env[68569]: _type = "Task" [ 978.696862] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.703965] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d7b19e-262e-4f81-a067-d9624523bb3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.710744] env[68569]: DEBUG oslo_vmware.api [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.720483] env[68569]: DEBUG nova.compute.provider_tree [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.733299] env[68569]: DEBUG nova.network.neutron [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Successfully created port: e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.803511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.881411] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 978.980430] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167425, 'name': CreateVM_Task, 'duration_secs': 0.789131} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.980581] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 978.981260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.981429] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.981766] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 978.982041] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0420276-9b30-4560-b4b6-bc93c0a5c9c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.986706] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 978.986706] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52664f33-3598-bb17-bb6a-33244f895823" [ 978.986706] env[68569]: _type = "Task" [ 978.986706] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.994287] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52664f33-3598-bb17-bb6a-33244f895823, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.085011] env[68569]: DEBUG nova.network.neutron [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 979.207351] env[68569]: DEBUG oslo_vmware.api [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161537} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.207612] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 979.207775] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 979.207944] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 979.208167] env[68569]: INFO nova.compute.manager [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Took 1.36 seconds to destroy the instance on the hypervisor. [ 979.208405] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 979.208589] env[68569]: DEBUG nova.compute.manager [-] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 979.208681] env[68569]: DEBUG nova.network.neutron [-] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 979.223450] env[68569]: DEBUG nova.network.neutron [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [{"id": "feea2419-1c39-4028-811f-c72311dae7a4", "address": "fa:16:3e:16:ea:60", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeea2419-1c", "ovs_interfaceid": "feea2419-1c39-4028-811f-c72311dae7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.226823] env[68569]: DEBUG nova.scheduler.client.report [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.267031] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.267342] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60187035-7b64-4ebd-a824-f5e543610aec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.275019] env[68569]: DEBUG oslo_vmware.api [None 
req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 979.275019] env[68569]: value = "task-3167428" [ 979.275019] env[68569]: _type = "Task" [ 979.275019] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.283111] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167428, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.496602] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52664f33-3598-bb17-bb6a-33244f895823, 'name': SearchDatastore_Task, 'duration_secs': 0.01352} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.496895] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.497145] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.497383] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.497529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.497706] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.498041] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f57a7273-b07f-458c-b6f9-cf8cdefbcc3e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
979.505903] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.506106] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.506809] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-179edc0c-9f00-4263-8d7f-61ce7f0d02f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.511862] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 979.511862] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52388fc3-0d39-7dbe-bb0e-3489a0d10463" [ 979.511862] env[68569]: _type = "Task" [ 979.511862] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.519891] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52388fc3-0d39-7dbe-bb0e-3489a0d10463, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.726637] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.726993] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Instance network_info: |[{"id": "feea2419-1c39-4028-811f-c72311dae7a4", "address": "fa:16:3e:16:ea:60", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeea2419-1c", "ovs_interfaceid": "feea2419-1c39-4028-811f-c72311dae7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 979.727460] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:ea:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'feea2419-1c39-4028-811f-c72311dae7a4', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.736081] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Creating folder: Project (713d88f021794769a64eef3807ade9be). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.736860] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.738880] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17e01b2a-5c0b-48c2-997a-2bc0a9cb6016 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.740929] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.829s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.743053] env[68569]: INFO nova.compute.claims [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.756152] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Created folder: Project (713d88f021794769a64eef3807ade9be) in parent group-v633430. [ 979.756436] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Creating folder: Instances. Parent ref: group-v633665. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.756753] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19b19cfc-b19e-4a2b-9e3f-d51f3b09c5f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.762666] env[68569]: INFO nova.scheduler.client.report [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Deleted allocations for instance 060fc4c8-b173-4fc4-8232-e13e3eac9dc3 [ 979.768131] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Created folder: Instances in parent group-v633665. [ 979.768131] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 979.768131] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.768131] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3dc67f8-2ac9-44d5-a269-96bdb1b5040a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.794557] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167428, 'name': PowerOffVM_Task, 'duration_secs': 0.214378} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.795987] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.796322] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.796322] env[68569]: value = "task-3167431" [ 979.796322] env[68569]: _type = "Task" [ 979.796322] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.797050] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1afcca-e216-4534-8505-941dc7b86be0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.809481] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167431, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.826121] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b6f206-e23d-49e9-92bd-d2b31bdd9fc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.890929] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 979.910625] env[68569]: DEBUG nova.network.neutron [-] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.923295] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.923573] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.923712] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.923943] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.924135] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.924348] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.924562] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.924717] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f 
tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.924881] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.925053] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.925227] env[68569]: DEBUG nova.virt.hardware [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.926231] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea33231-dc81-43c5-bafe-d8a889fb19c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.935355] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1980e151-ee17-4e39-adde-08f797d835a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.953629] env[68569]: DEBUG nova.compute.manager [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Received event network-changed-feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 979.953822] env[68569]: DEBUG nova.compute.manager [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Refreshing instance network info cache due to event network-changed-feea2419-1c39-4028-811f-c72311dae7a4. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 979.954052] env[68569]: DEBUG oslo_concurrency.lockutils [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] Acquiring lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.954243] env[68569]: DEBUG oslo_concurrency.lockutils [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] Acquired lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.954410] env[68569]: DEBUG nova.network.neutron [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Refreshing network info cache for port feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.023104] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52388fc3-0d39-7dbe-bb0e-3489a0d10463, 'name': SearchDatastore_Task, 'duration_secs': 0.017641} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.023988] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-392f8e1f-58d6-48db-b596-0876a7259590 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.030391] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 980.030391] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5272d73c-c1f1-a615-4bb9-54dbff7fe0e7" [ 980.030391] env[68569]: _type = "Task" [ 980.030391] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.039574] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5272d73c-c1f1-a615-4bb9-54dbff7fe0e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.271193] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a0abff52-fd3f-4d58-bdb9-f661ca37e43f tempest-ServersTestJSON-747284175 tempest-ServersTestJSON-747284175-project-member] Lock "060fc4c8-b173-4fc4-8232-e13e3eac9dc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.704s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.311178] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167431, 'name': CreateVM_Task, 'duration_secs': 0.343302} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.311365] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.312368] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.312546] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.312852] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 980.313135] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b8ae5bd-b1ce-4274-a99f-9623ed8b942a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.318593] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 980.318593] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed41a0-1a9b-20ba-eb77-f98ab59f71ab" [ 980.318593] env[68569]: _type = "Task" [ 980.318593] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.326917] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed41a0-1a9b-20ba-eb77-f98ab59f71ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.340079] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 980.340079] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-28d9eaed-9dfb-4ba5-8c7a-08bd32791eda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.346395] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 980.346395] env[68569]: value = "task-3167432" [ 980.346395] env[68569]: _type = "Task" [ 980.346395] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.357530] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167432, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.415072] env[68569]: INFO nova.compute.manager [-] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Took 1.20 seconds to deallocate network for instance. [ 980.484628] env[68569]: DEBUG nova.network.neutron [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Successfully updated port: e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.548934] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5272d73c-c1f1-a615-4bb9-54dbff7fe0e7, 'name': SearchDatastore_Task, 'duration_secs': 0.011869} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.549285] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.549681] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.550075] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e320ee61-0fc7-4200-bdab-a3b5db094712 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.559741] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 980.559741] env[68569]: value = "task-3167433" [ 980.559741] env[68569]: _type = "Task" [ 980.559741] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.578739] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167433, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.727640] env[68569]: DEBUG nova.network.neutron [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updated VIF entry in instance network info cache for port feea2419-1c39-4028-811f-c72311dae7a4. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 980.728172] env[68569]: DEBUG nova.network.neutron [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [{"id": "feea2419-1c39-4028-811f-c72311dae7a4", "address": "fa:16:3e:16:ea:60", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeea2419-1c", "ovs_interfaceid": "feea2419-1c39-4028-811f-c72311dae7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.830309] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ed41a0-1a9b-20ba-eb77-f98ab59f71ab, 'name': SearchDatastore_Task, 'duration_secs': 0.010568} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.833731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.833984] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.834271] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.834422] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.834603] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.835420] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4f8af52-a55b-45a3-8e9e-1020695cd618 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.855945] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.856172] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.858311] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8efa7932-8f4a-4adb-bd6a-75ee23b4045e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.863585] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167432, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.867341] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 980.867341] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522ec820-b6cc-2ef8-6e47-401cf95466b3" [ 980.867341] env[68569]: _type = "Task" [ 980.867341] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.879738] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522ec820-b6cc-2ef8-6e47-401cf95466b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.922849] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.986815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.986996] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.987119] env[68569]: DEBUG nova.network.neutron [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.074265] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167433, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50181} completed 
successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.074418] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.074630] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.074891] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0317631-3cac-4041-bafd-c1a891261316 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.083390] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 981.083390] env[68569]: value = "task-3167434" [ 981.083390] env[68569]: _type = "Task" [ 981.083390] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.092642] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167434, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.108890] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a362d6-91ec-4a09-a9c7-7b8b672cb136 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.116676] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07bf2d5-1da0-427d-98ac-20e53f2da779 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.149098] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47088858-f581-4fa9-b33a-009cd55c5e09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.157592] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8870b39e-e93b-4825-8eeb-85f67381f274 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.172771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "53cc8dbd-c163-403a-9286-e1f8ad939f94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.173066] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.173290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "53cc8dbd-c163-403a-9286-e1f8ad939f94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.173472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.173640] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.175589] env[68569]: INFO nova.compute.manager [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Terminating instance [ 981.177573] env[68569]: DEBUG nova.compute.provider_tree [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.230935] env[68569]: DEBUG oslo_concurrency.lockutils [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] Releasing lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.231222] env[68569]: DEBUG nova.compute.manager [req-e9af8ac0-9956-472d-b0d1-f35d3706e5d8 req-26d80279-ea49-4f4c-ab41-5644dc4bb279 service nova] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Received event network-vif-deleted-2455c808-85b7-415e-9905-53066039a3f3 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 981.357486] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167432, 'name': CreateSnapshot_Task, 'duration_secs': 0.545647} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.357745] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 981.358588] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2beb096e-f90e-4d80-a955-8a434ed9c680 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.375978] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522ec820-b6cc-2ef8-6e47-401cf95466b3, 'name': SearchDatastore_Task, 'duration_secs': 0.058231} completed successfully. 
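Note: the terminate path above serializes on a lock named after the instance UUID and a short-lived "<uuid>-events" lock before destroying the VM. A rough sketch of that nesting using oslo.concurrency's lock context manager; the function and callback names are illustrative, and Nova's real code wraps this in its own helpers:

```python
# Rough sketch of the per-instance lock nesting visible in the log: an outer
# lock on the instance UUID for do_terminate_instance and an inner
# "<uuid>-events" lock used only to clear pending external events.
from oslo_concurrency import lockutils


def terminate_instance(instance_uuid, destroy_fn, clear_events_fn):
    with lockutils.lock(instance_uuid):                 # serialize work on this instance
        with lockutils.lock(instance_uuid + '-events'):
            clear_events_fn(instance_uuid)              # drop queued network-vif-* events
        destroy_fn(instance_uuid)                       # power off / unregister / delete files
```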
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.377043] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c75981d3-fe94-4593-a372-da4316772523 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.381982] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 981.381982] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a03e65-a54d-04c1-a3a0-6a1db23d5197" [ 981.381982] env[68569]: _type = "Task" [ 981.381982] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.389608] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a03e65-a54d-04c1-a3a0-6a1db23d5197, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.528282] env[68569]: DEBUG nova.network.neutron [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.592926] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.191711} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.593310] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 981.593993] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5165e402-5ebf-4ef1-aef5-f07ada1d2642 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.618239] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.618559] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3db94291-d447-41e9-ad43-9fcfebba0aa8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.639585] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 981.639585] env[68569]: value = "task-3167435" [ 981.639585] env[68569]: _type = "Task" [ 981.639585] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.653417] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167435, 'name': ReconfigVM_Task} progress is 10%. 
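Note: the sequence above copies the cached image VMDK into the instance directory and then grows the root disk to the flavor size ("Extending root virtual disk to 1048576" KB, i.e. 1 GiB). A simplified sketch of that decision; `copy_vmdk` and `extend_vmdk` are hypothetical stand-ins for the CopyVirtualDisk_Task and ExtendVirtualDisk_Task calls:

```python
# Sketch of the copy-then-resize step seen in the log: copy the cached VMDK,
# then extend it only when the requested root size exceeds the image size.
def prepare_root_disk(copy_vmdk, extend_vmdk, cached_path, instance_path,
                      image_size_kb, flavor_root_kb):
    copy_vmdk(cached_path, instance_path)
    if flavor_root_kb > image_size_kb:
        # Only grow the disk; the root disk is never shrunk here.
        extend_vmdk(instance_path, flavor_root_kb)
```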
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.680466] env[68569]: DEBUG nova.network.neutron [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updating instance_info_cache with network_info: [{"id": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "address": "fa:16:3e:9b:80:66", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0dc9415-fd", "ovs_interfaceid": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.682254] env[68569]: DEBUG nova.scheduler.client.report [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.685586] env[68569]: DEBUG nova.compute.manager [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 981.685789] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.686956] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691ba532-68ee-48f3-94f7-a59dbaa4dc07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.694091] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.694331] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7f0f6d3-5c17-45a9-abf8-13749dd84d92 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.700245] env[68569]: DEBUG oslo_vmware.api [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 981.700245] env[68569]: value = "task-3167436" [ 981.700245] env[68569]: _type = "Task" [ 981.700245] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.709787] env[68569]: DEBUG oslo_vmware.api [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167436, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.875872] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 981.876315] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ab1cf334-2f93-4dee-99a7-5a102b252a7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.888323] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 981.888323] env[68569]: value = "task-3167437" [ 981.888323] env[68569]: _type = "Task" [ 981.888323] env[68569]: } to complete. 
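Note: the CreateSnapshot_Task / "Creating linked-clone VM from snapshot" / CloneVM_Task entries belong to the snapshot-upload path: a snapshot is taken and the VM is cloned against it so the clone's disks are thin children of the snapshot. The sketch below only shows the shape of that sequence; `create_snapshot`, `clone_from_snapshot` and `wait_for_task` are hypothetical wrappers, and the clone-spec details are deliberately elided:

```python
# Sketch of the snapshot -> linked-clone sequence used before image upload.
def snapshot_and_clone(create_snapshot, clone_from_snapshot, wait_for_task,
                       vm_ref, clone_name):
    snap_task = create_snapshot(vm_ref, name='upload-snapshot', memory=False)
    snap_ref = wait_for_task(snap_task)                  # CreateSnapshot_Task
    clone_task = clone_from_snapshot(vm_ref, snap_ref, clone_name)
    return wait_for_task(clone_task)                     # CloneVM_Task -> clone VM ref
```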
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.900106] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a03e65-a54d-04c1-a3a0-6a1db23d5197, 'name': SearchDatastore_Task, 'duration_secs': 0.009447} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.900802] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.901173] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948/92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.901385] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-240a9e4e-4f97-4669-960c-930eb2d8f645 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.907100] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167437, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.916139] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 981.916139] env[68569]: value = "task-3167438" [ 981.916139] env[68569]: _type = "Task" [ 981.916139] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.929078] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.973251] env[68569]: DEBUG nova.compute.manager [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Received event network-vif-plugged-e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 981.973251] env[68569]: DEBUG oslo_concurrency.lockutils [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] Acquiring lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.973251] env[68569]: DEBUG oslo_concurrency.lockutils [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.973251] env[68569]: DEBUG oslo_concurrency.lockutils [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.973530] env[68569]: DEBUG nova.compute.manager [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] No waiting events found dispatching network-vif-plugged-e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 981.973530] env[68569]: WARNING nova.compute.manager [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Received unexpected event network-vif-plugged-e0dc9415-fd47-40f4-aa7d-b89db0dccfae for instance with vm_state building and task_state spawning. [ 981.973677] env[68569]: DEBUG nova.compute.manager [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Received event network-changed-e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 981.973754] env[68569]: DEBUG nova.compute.manager [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Refreshing instance network info cache due to event network-changed-e0dc9415-fd47-40f4-aa7d-b89db0dccfae. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 981.973906] env[68569]: DEBUG oslo_concurrency.lockutils [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] Acquiring lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.152648] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167435, 'name': ReconfigVM_Task, 'duration_secs': 0.285706} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.152648] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 0dc5da15-4c10-4754-ac82-a130b933295d/0dc5da15-4c10-4754-ac82-a130b933295d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.154035] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa79d79a-db24-4656-932f-53de0a3d1e1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.160777] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 982.160777] env[68569]: value = "task-3167439" [ 982.160777] env[68569]: _type = "Task" [ 982.160777] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.173072] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167439, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.187354] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.187900] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Start building networks asynchronously for instance. 
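Note: "Start building networks asynchronously for instance" means Neutron port allocation runs concurrently with the rest of spawn and is joined later. Nova does this with eventlet greenthreads; the thread-pool sketch below only illustrates the overlap, with all callables passed in as hypothetical parameters:

```python
# Shape of the asynchronous network allocation during spawn: ports are
# allocated in the background while block devices are prepared, and the
# result is awaited before the VM is created.
from concurrent.futures import ThreadPoolExecutor


def spawn(allocate_networks, build_block_devices, create_vm, instance):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_networks, instance)  # Neutron ports
        bdms = build_block_devices(instance)                  # overlaps with allocation
        network_info = nw_future.result()                     # block until ports exist
    return create_vm(instance, network_info, bdms)
```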
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 982.190873] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.191571] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Instance network_info: |[{"id": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "address": "fa:16:3e:9b:80:66", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0dc9415-fd", "ovs_interfaceid": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 982.191571] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.833s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.191792] env[68569]: DEBUG nova.objects.instance [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lazy-loading 'resources' on Instance uuid b40c9dec-cebc-4d23-8df4-96e804333706 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.192825] env[68569]: DEBUG oslo_concurrency.lockutils [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] Acquired lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.193096] env[68569]: DEBUG nova.network.neutron [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Refreshing network info cache for port e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.194567] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:80:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0dc9415-fd47-40f4-aa7d-b89db0dccfae', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.202531] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.203593] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.206787] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf598b50-b114-4e42-8051-aa6d4b6935be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.229524] env[68569]: DEBUG oslo_vmware.api [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167436, 'name': PowerOffVM_Task, 'duration_secs': 0.25072} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.231099] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.231295] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.231533] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.231533] env[68569]: value = "task-3167440" [ 982.231533] env[68569]: _type = "Task" [ 982.231533] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.231718] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a1760f8-be6e-4284-8305-7d7550198535 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.243894] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167440, 'name': CreateVM_Task} progress is 5%. 
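Note: the "Instance VIF info [...]" entry just before CreateVM_Task is a compact projection of the network_info JSON logged earlier (bridge, MAC, NSX logical-switch id, port id, vmxnet3 model). A simplified version of that mapping, using only fields visible in the log; the real vmops code handles more network-ref variants:

```python
# Simplified translation of one network_info entry into the "Instance VIF
# info" dict that feeds the VM-creation spec.
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    return {
        'network_name': vif['network']['bridge'],               # e.g. 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }
```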
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.348851] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.349276] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.349477] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Deleting the datastore file [datastore2] 53cc8dbd-c163-403a-9286-e1f8ad939f94 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.349858] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e010cf3-c280-4736-876a-1015e6500dd6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.359528] env[68569]: DEBUG oslo_vmware.api [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for the task: (returnval){ [ 982.359528] env[68569]: value = "task-3167442" [ 982.359528] env[68569]: _type = "Task" [ 982.359528] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.371733] env[68569]: DEBUG oslo_vmware.api [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.400242] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167437, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.426797] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167438, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.670718] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167439, 'name': Rename_Task, 'duration_secs': 0.448122} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.671061] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 982.671365] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fe416d4-c183-492b-8e6a-a46f929a984f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.677763] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 982.677763] env[68569]: value = "task-3167443" [ 982.677763] env[68569]: _type = "Task" [ 982.677763] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.685431] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.692845] env[68569]: DEBUG nova.compute.utils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 982.694256] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 982.694430] env[68569]: DEBUG nova.network.neutron [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.743885] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167440, 'name': CreateVM_Task, 'duration_secs': 0.398175} completed successfully. 
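Note: the "Using /dev/sd instead of None" entry records the fallback to a default device-name prefix when the request did not supply one, after which the next free name is chosen. A stripped-down next-free-name helper; the real get_next_device_name also validates and normalizes caller-supplied names:

```python
# Pick the next unused /dev/sdX name given the device names already attached.
import string


def next_device_name(used, prefix='/dev/sd'):
    used_letters = {name[len(prefix)] for name in used
                    if name.startswith(prefix) and len(name) > len(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used_letters:
            return prefix + letter
    raise ValueError('no free device names under %s' % prefix)


# e.g. next_device_name(['/dev/sda', '/dev/sdb']) -> '/dev/sdc'
```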
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.746653] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.750093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.750093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.750093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 982.750295] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bb1a22c-0dd6-4080-bb53-e9307d9dfd37 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.756221] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 982.756221] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5203b81b-17e6-02e8-2bce-ddf49afd0336" [ 982.756221] env[68569]: _type = "Task" [ 982.756221] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.765731] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5203b81b-17e6-02e8-2bce-ddf49afd0336, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.768825] env[68569]: DEBUG nova.policy [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868bbe891585423f85374f6dffdc7813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62ff17f9dcc242e0aff061402e57bdcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 982.869601] env[68569]: DEBUG oslo_vmware.api [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Task: {'id': task-3167442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289942} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.872155] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.872367] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.872562] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.872739] env[68569]: INFO nova.compute.manager [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Took 1.19 seconds to destroy the instance on the hypervisor. [ 982.872987] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
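Note: the "Policy check for network:attach_external_network failed" entry shows request credentials carrying only the 'reader' and 'member' roles, so a rule that demands a more privileged role (external-network attach is typically admin-restricted) evaluates to False. Real enforcement goes through oslo.policy rule expressions; the toy role check below only mirrors the outcome visible in the log:

```python
# Toy stand-in for the failed policy check: the logged credentials lack the
# role the rule requires, so authorization is denied.
def is_authorized(creds, required_role='admin'):
    return required_role in creds.get('roles', [])


creds = {'roles': ['reader', 'member'],
         'project_id': '62ff17f9dcc242e0aff061402e57bdcd'}
assert not is_authorized(creds)   # mirrors "Policy check ... failed"
```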
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.875255] env[68569]: DEBUG nova.compute.manager [-] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.875357] env[68569]: DEBUG nova.network.neutron [-] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.898642] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167437, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.930964] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521972} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.933879] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948/92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 982.934114] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 982.934798] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01941391-2110-481a-8ac7-e599eac6ddec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.941995] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 982.941995] env[68569]: value = "task-3167444" [ 982.941995] env[68569]: _type = "Task" [ 982.941995] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.955054] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167444, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.011147] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551291f1-ddfa-48be-8c3d-09d73c009544 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.019240] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4083a084-624b-4cfc-ae9d-ab1b7e228695 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.057963] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2d0292-32b3-42ce-a776-f6a69611f550 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.066158] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeefed59-3426-4ac7-8d96-11f11b54dd70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.082658] env[68569]: DEBUG nova.compute.provider_tree [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 983.117846] env[68569]: DEBUG nova.network.neutron [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updated VIF entry in instance network info cache for port e0dc9415-fd47-40f4-aa7d-b89db0dccfae. 
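Note: the resource tracker only rewrites provider inventory when it differs from what the ProviderTree already caches, which is why the log alternates between "Inventory has not changed" and "Updating inventory" (here DISK_GB max_unit moved from 128 to 129). A minimal cache-and-compare sketch with hypothetical callbacks:

```python
# Only push inventory to placement when it actually changed for the provider.
def update_inventory(cache, provider_uuid, new_inventory, push_to_placement,
                     log=print):
    if cache.get(provider_uuid) == new_inventory:
        log('Inventory has not changed for provider %s' % provider_uuid)
        return False
    cache[provider_uuid] = new_inventory
    push_to_placement(provider_uuid, new_inventory)
    log('Updated inventory for provider %s' % provider_uuid)
    return True
```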
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.118236] env[68569]: DEBUG nova.network.neutron [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updating instance_info_cache with network_info: [{"id": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "address": "fa:16:3e:9b:80:66", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0dc9415-fd", "ovs_interfaceid": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.192608] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167443, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.199214] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 983.252590] env[68569]: DEBUG nova.network.neutron [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Successfully created port: 5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.267254] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5203b81b-17e6-02e8-2bce-ddf49afd0336, 'name': SearchDatastore_Task, 'duration_secs': 0.021116} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.267540] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.267763] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.268164] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.268320] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.268497] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.269348] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2d8f27d-e62c-419f-91ca-f76f06694b21 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.278083] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.278336] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Folder [datastore1] devstack-image-cache_base created. 
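Note: the entries around the image cache (lock on the cached VMDK path, MakeDirectory for devstack-image-cache_base, SearchDatastore_Task) follow a fetch-if-missing pattern: take the cache lock, make sure the cache folder exists, and only download the image when the datastore search finds nothing. An outline of that flow; `make_dir`, `vmdk_exists` and `fetch_image` are hypothetical stand-ins:

```python
# Outline of the fetch-image-if-missing flow around the datastore image cache.
from oslo_concurrency import lockutils


def ensure_cached_image(cache_key, cache_dir, vmdk_path,
                        make_dir, vmdk_exists, fetch_image):
    with lockutils.lock(cache_key):      # e.g. "[datastore1] devstack-image-cache_base/<image-id>"
        make_dir(cache_dir)              # idempotent folder creation (MakeDirectory)
        if not vmdk_exists(vmdk_path):   # SearchDatastore_Task in the log
            fetch_image(vmdk_path)       # download from Glance into the cache
        return vmdk_path
```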
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.279063] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8e164da-f5df-4423-af53-8d15209ad2d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.284204] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 983.284204] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d539fb-b01b-9164-43f2-a54f06d9e5f6" [ 983.284204] env[68569]: _type = "Task" [ 983.284204] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.291572] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d539fb-b01b-9164-43f2-a54f06d9e5f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.401228] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167437, 'name': CloneVM_Task, 'duration_secs': 1.319053} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.402498] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Created linked-clone VM from snapshot [ 983.403475] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b5cdb1-98ca-4569-b50c-5be6b567012c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.409810] env[68569]: DEBUG nova.compute.manager [req-f3e299df-89e7-4755-856f-a61013615270 req-a85dfd6a-e3e4-4e91-b39d-459393743eb5 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Received event network-vif-deleted-9ecb2363-68e3-455a-8b8f-db5226a52abf {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 983.409998] env[68569]: INFO nova.compute.manager [req-f3e299df-89e7-4755-856f-a61013615270 req-a85dfd6a-e3e4-4e91-b39d-459393743eb5 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Neutron deleted interface 9ecb2363-68e3-455a-8b8f-db5226a52abf; detaching it from the instance and deleting it from the info cache [ 983.410172] env[68569]: DEBUG nova.network.neutron [req-f3e299df-89e7-4755-856f-a61013615270 req-a85dfd6a-e3e4-4e91-b39d-459393743eb5 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.416885] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] 
[instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Uploading image f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 983.452578] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 983.452578] env[68569]: value = "vm-633669" [ 983.452578] env[68569]: _type = "VirtualMachine" [ 983.452578] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 983.455827] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-53d6f8f5-6b89-4dcd-b0c4-e333c5f54f12 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.457460] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072091} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.457717] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.458894] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf1d18f-e1f3-4adc-aac3-fdbcad74b614 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.462887] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lease: (returnval){ [ 983.462887] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527a13be-a88a-a45e-b096-17e8e5bf80e9" [ 983.462887] env[68569]: _type = "HttpNfcLease" [ 983.462887] env[68569]: } obtained for exporting VM: (result){ [ 983.462887] env[68569]: value = "vm-633669" [ 983.462887] env[68569]: _type = "VirtualMachine" [ 983.462887] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 983.463180] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the lease: (returnval){ [ 983.463180] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527a13be-a88a-a45e-b096-17e8e5bf80e9" [ 983.463180] env[68569]: _type = "HttpNfcLease" [ 983.463180] env[68569]: } to be ready. 
{{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 983.485262] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948/92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.485567] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68d0c83b-07e2-4092-a2dc-c4bc77ec9547 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.505167] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 983.505167] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527a13be-a88a-a45e-b096-17e8e5bf80e9" [ 983.505167] env[68569]: _type = "HttpNfcLease" [ 983.505167] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 983.505447] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 983.505447] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527a13be-a88a-a45e-b096-17e8e5bf80e9" [ 983.505447] env[68569]: _type = "HttpNfcLease" [ 983.505447] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 983.506227] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947efb46-0cc9-410f-bb1a-d1a7898b7744 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.509730] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 983.509730] env[68569]: value = "task-3167446" [ 983.509730] env[68569]: _type = "Task" [ 983.509730] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.515972] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59633-ca21-8204-1547-a6b0df940c8e/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 983.516212] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59633-ca21-8204-1547-a6b0df940c8e/disk-0.vmdk for reading. 
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 983.578323] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167446, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.603825] env[68569]: ERROR nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [req-8456c7f6-b73f-4c6c-b55e-673dcc0acffe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8456c7f6-b73f-4c6c-b55e-673dcc0acffe"}]} [ 983.620702] env[68569]: DEBUG nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 983.622820] env[68569]: DEBUG oslo_concurrency.lockutils [req-439671e4-06c5-4330-b1e2-7ae8780fa923 req-0610ddf2-c396-4cf6-a6ea-68adf28bb908 service nova] Releasing lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.636807] env[68569]: DEBUG nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 983.637196] env[68569]: DEBUG nova.compute.provider_tree [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 
1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 983.648028] env[68569]: DEBUG nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 983.668599] env[68569]: DEBUG nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 983.690377] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167443, 'name': PowerOnVM_Task, 'duration_secs': 0.541224} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.693180] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 983.693396] env[68569]: DEBUG nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 983.694630] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e0e57f-3430-4147-be1d-d59fde80ab84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.697418] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.699554] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.705281] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fec65384-9b57-4da2-9d1a-b77e0e762619 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.794558] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': 
session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d539fb-b01b-9164-43f2-a54f06d9e5f6, 'name': SearchDatastore_Task, 'duration_secs': 0.048059} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.798292] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949f9b29-37eb-4eda-a255-daac61836dca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.803665] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 983.803665] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52162808-677a-b170-e27c-a788257b258f" [ 983.803665] env[68569]: _type = "Task" [ 983.803665] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.812546] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52162808-677a-b170-e27c-a788257b258f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.885635] env[68569]: DEBUG nova.network.neutron [-] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.912676] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea218e09-3e5d-4028-aa1c-95c4eee39131 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.923876] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d501894-49fc-458e-a549-998be2dcfd1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.962987] env[68569]: DEBUG nova.compute.manager [req-f3e299df-89e7-4755-856f-a61013615270 req-a85dfd6a-e3e4-4e91-b39d-459393743eb5 service nova] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Detach interface failed, port_id=9ecb2363-68e3-455a-8b8f-db5226a52abf, reason: Instance 53cc8dbd-c163-403a-9286-e1f8ad939f94 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 983.982670] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b081aa-3fa4-405f-81d6-68c9f6473e29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.991236] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4e6d13-797c-4a9a-b6da-ba0225efe87e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.027588] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9bc5dc-eafe-42e7-8ea7-fee613383427 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.036101] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167446, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.040534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ce68d5-5816-43d2-9177-4af0405d0dc7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.054968] env[68569]: DEBUG nova.compute.provider_tree [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.215582] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.216057] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.217238] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 984.220084] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.221630] env[68569]: INFO nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] bringing vm to original state: 'stopped' [ 984.224302] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.224819] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.224912] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.224955] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 984.225125] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.243586] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.243914] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.244084] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 
tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.244583] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.244907] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.245098] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.245373] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.245541] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.245781] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.246250] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.246250] env[68569]: DEBUG nova.virt.hardware [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.247453] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254f4194-dc2a-424e-8c09-73678de805b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.258655] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2328a62a-f69b-482e-9f1c-cc51427e8f89 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.314184] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': 
session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52162808-677a-b170-e27c-a788257b258f, 'name': SearchDatastore_Task, 'duration_secs': 0.010369} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.314500] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.314876] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 1689f1f3-53f2-4c02-a969-e4dae21f14b7/1689f1f3-53f2-4c02-a969-e4dae21f14b7.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.315203] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7777e8a7-1720-4924-8cab-45c09be79028 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.322878] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 984.322878] env[68569]: value = "task-3167447" [ 984.322878] env[68569]: _type = "Task" [ 984.322878] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.332524] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167447, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.392027] env[68569]: INFO nova.compute.manager [-] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Took 1.52 seconds to deallocate network for instance. [ 984.536953] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167446, 'name': ReconfigVM_Task, 'duration_secs': 0.649973} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.538030] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948/92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.538822] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1987521f-4181-4399-b4b4-bbc88243727f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.550047] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 984.550047] env[68569]: value = "task-3167448" [ 984.550047] env[68569]: _type = "Task" [ 984.550047] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.567466] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167448, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.601564] env[68569]: DEBUG nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 984.601772] env[68569]: DEBUG nova.compute.provider_tree [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 124 to 125 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 984.601883] env[68569]: DEBUG nova.compute.provider_tree [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.731008] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.834416] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167447, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469292} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.834843] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 1689f1f3-53f2-4c02-a969-e4dae21f14b7/1689f1f3-53f2-4c02-a969-e4dae21f14b7.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 984.835087] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.835355] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-052b397f-79e2-4600-80a4-ba5a7a0d17be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.843017] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 984.843017] env[68569]: value = "task-3167449" [ 984.843017] env[68569]: _type = "Task" [ 984.843017] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.851993] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167449, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.900119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.907306] env[68569]: DEBUG nova.network.neutron [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Successfully updated port: 5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.951411] env[68569]: DEBUG nova.compute.manager [req-378021d7-93db-4e33-aa5c-2a844955b1d2 req-4ac2c3a8-1def-44a3-acd7-cdd331e90ef0 service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Received event network-vif-plugged-5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 984.951722] env[68569]: DEBUG oslo_concurrency.lockutils [req-378021d7-93db-4e33-aa5c-2a844955b1d2 req-4ac2c3a8-1def-44a3-acd7-cdd331e90ef0 service nova] Acquiring lock "ae92919c-f2eb-4231-afea-b23269e09a0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.952076] env[68569]: DEBUG oslo_concurrency.lockutils [req-378021d7-93db-4e33-aa5c-2a844955b1d2 req-4ac2c3a8-1def-44a3-acd7-cdd331e90ef0 service nova] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.952477] env[68569]: DEBUG oslo_concurrency.lockutils [req-378021d7-93db-4e33-aa5c-2a844955b1d2 req-4ac2c3a8-1def-44a3-acd7-cdd331e90ef0 service nova] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.952477] env[68569]: DEBUG nova.compute.manager [req-378021d7-93db-4e33-aa5c-2a844955b1d2 req-4ac2c3a8-1def-44a3-acd7-cdd331e90ef0 service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] No waiting events found dispatching network-vif-plugged-5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 984.952727] env[68569]: WARNING nova.compute.manager [req-378021d7-93db-4e33-aa5c-2a844955b1d2 req-4ac2c3a8-1def-44a3-acd7-cdd331e90ef0 service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Received unexpected event network-vif-plugged-5de82b0c-a595-4eb1-89af-5ab6517061ad for instance with vm_state building and task_state spawning. [ 985.060179] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167448, 'name': Rename_Task, 'duration_secs': 0.208053} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.060499] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.060895] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30b2a313-9a42-4afb-a818-a22b618be24f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.066769] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 985.066769] env[68569]: value = "task-3167450" [ 985.066769] env[68569]: _type = "Task" [ 985.066769] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.075413] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167450, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.107961] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.916s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.111102] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.476s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.112343] env[68569]: DEBUG nova.objects.instance [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lazy-loading 'resources' on Instance uuid 6b2120d3-2e4b-4d1b-8109-6513b3b320eb {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.133011] env[68569]: INFO nova.scheduler.client.report [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted allocations for instance b40c9dec-cebc-4d23-8df4-96e804333706 [ 985.232200] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "0dc5da15-4c10-4754-ac82-a130b933295d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.232466] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 
tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.232659] env[68569]: DEBUG nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.233733] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1e98a7-a104-440f-8c9c-43e53945db42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.240873] env[68569]: DEBUG nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 985.353052] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074474} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.353427] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.354249] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173b182f-d4ca-434c-aac7-18bdcb1ce766 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.375743] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 1689f1f3-53f2-4c02-a969-e4dae21f14b7/1689f1f3-53f2-4c02-a969-e4dae21f14b7.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.376059] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de670143-5fc7-4b2d-9507-2b26097c7f23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.395812] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 985.395812] env[68569]: value = "task-3167451" [ 
985.395812] env[68569]: _type = "Task" [ 985.395812] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.403818] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167451, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.410634] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "refresh_cache-ae92919c-f2eb-4231-afea-b23269e09a0a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.410759] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "refresh_cache-ae92919c-f2eb-4231-afea-b23269e09a0a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.411270] env[68569]: DEBUG nova.network.neutron [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.577219] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.640450] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5fc86d1d-1dba-45a9-92e7-94d98ca664d9 tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "b40c9dec-cebc-4d23-8df4-96e804333706" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.801s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.746285] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.748121] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e603dffc-1772-4306-8f69-f3447f9d76db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.753698] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 985.753698] env[68569]: value = "task-3167452" [ 985.753698] env[68569]: _type = "Task" [ 985.753698] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.764419] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167452, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.863046] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca20ec08-d0f9-4524-b545-5dbed653ca78 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.871135] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9568ae5-75c8-45af-a3fb-5018c3083155 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.907084] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c5805d-115c-4672-b1fc-95b484780340 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.917077] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167451, 'name': ReconfigVM_Task, 'duration_secs': 0.290298} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.919522] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 1689f1f3-53f2-4c02-a969-e4dae21f14b7/1689f1f3-53f2-4c02-a969-e4dae21f14b7.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.920901] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-965fad47-9625-40db-ac4f-20564fd680f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.923650] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6f35da-e93d-41c8-ba48-3f657849e0c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.939887] env[68569]: DEBUG nova.compute.provider_tree [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.943032] env[68569]: DEBUG oslo_vmware.api [None 
req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 985.943032] env[68569]: value = "task-3167453" [ 985.943032] env[68569]: _type = "Task" [ 985.943032] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.952947] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167453, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.955349] env[68569]: DEBUG nova.network.neutron [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 986.077304] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.126877] env[68569]: DEBUG nova.network.neutron [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Updating instance_info_cache with network_info: [{"id": "5de82b0c-a595-4eb1-89af-5ab6517061ad", "address": "fa:16:3e:0c:62:c2", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de82b0c-a5", "ovs_interfaceid": "5de82b0c-a595-4eb1-89af-5ab6517061ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.265629] env[68569]: DEBUG oslo_vmware.api [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167452, 'name': PowerOffVM_Task, 'duration_secs': 0.228065} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.265956] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.266295] env[68569]: DEBUG nova.compute.manager [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.267243] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a37724c-fe90-4c14-b61d-bf4d72459e2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.455954] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167453, 'name': Rename_Task, 'duration_secs': 0.132877} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.456352] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.456639] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2226e52-35ae-465f-a5c1-6e4223bf9a27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.464091] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 986.464091] env[68569]: value = "task-3167454" [ 986.464091] env[68569]: _type = "Task" [ 986.464091] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.474064] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167454, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.475013] env[68569]: DEBUG nova.scheduler.client.report [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 986.475252] env[68569]: DEBUG nova.compute.provider_tree [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 125 to 126 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 986.475430] env[68569]: DEBUG nova.compute.provider_tree [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.578251] env[68569]: DEBUG oslo_vmware.api [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167450, 'name': PowerOnVM_Task, 'duration_secs': 1.163967} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.578622] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.578823] env[68569]: INFO nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Took 9.08 seconds to spawn the instance on the hypervisor. 
[ 986.579010] env[68569]: DEBUG nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.579857] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86eb257f-477b-4534-89b5-2504481ba570 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.632041] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "refresh_cache-ae92919c-f2eb-4231-afea-b23269e09a0a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.632041] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Instance network_info: |[{"id": "5de82b0c-a595-4eb1-89af-5ab6517061ad", "address": "fa:16:3e:0c:62:c2", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de82b0c-a5", "ovs_interfaceid": "5de82b0c-a595-4eb1-89af-5ab6517061ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 986.632498] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:62:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5de82b0c-a595-4eb1-89af-5ab6517061ad', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.640322] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.640539] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.641079] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed5217c2-701e-4ec5-9b83-9b32109c0ceb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.660841] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.660841] env[68569]: value = "task-3167455" [ 986.660841] env[68569]: _type = "Task" [ 986.660841] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.669203] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167455, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.784111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.784111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "398dd3c7-c630-4a29-b204-80f6fb394ce8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.784111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.784111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "398dd3c7-c630-4a29-b204-80f6fb394ce8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.784430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.784430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd 
tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.785061] env[68569]: INFO nova.compute.manager [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Terminating instance [ 986.978799] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167454, 'name': PowerOnVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.981307] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.984084] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.897s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.984383] env[68569]: DEBUG nova.objects.instance [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid 7b95aece-35db-4eab-b221-c5eccd749eae {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.007354] env[68569]: INFO nova.scheduler.client.report [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted allocations for instance 6b2120d3-2e4b-4d1b-8109-6513b3b320eb [ 987.041609] env[68569]: DEBUG nova.compute.manager [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Received event network-changed-5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 987.041809] env[68569]: DEBUG nova.compute.manager [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Refreshing instance network info cache due to event network-changed-5de82b0c-a595-4eb1-89af-5ab6517061ad. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 987.042014] env[68569]: DEBUG oslo_concurrency.lockutils [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] Acquiring lock "refresh_cache-ae92919c-f2eb-4231-afea-b23269e09a0a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.042587] env[68569]: DEBUG oslo_concurrency.lockutils [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] Acquired lock "refresh_cache-ae92919c-f2eb-4231-afea-b23269e09a0a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.042822] env[68569]: DEBUG nova.network.neutron [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Refreshing network info cache for port 5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.097994] env[68569]: INFO nova.compute.manager [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Took 27.23 seconds to build instance. [ 987.171174] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167455, 'name': CreateVM_Task, 'duration_secs': 0.33901} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.171316] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.172051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.172230] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.172577] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 987.172854] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7184f6b-cec6-44c6-b0c7-496cc3d2dcd6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.177931] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 987.177931] env[68569]: value = 
"session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52206cae-0a80-edfb-55af-e906865f68b0" [ 987.177931] env[68569]: _type = "Task" [ 987.177931] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.185871] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52206cae-0a80-edfb-55af-e906865f68b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.289084] env[68569]: DEBUG nova.compute.manager [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 987.289316] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 987.290153] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2643a28-0c0b-4eb7-96ca-0c9d58103881 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.294862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.299247] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.299497] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5dc72746-b5f3-469a-89f4-318a6ae24bcb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.306130] env[68569]: DEBUG oslo_vmware.api [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 987.306130] env[68569]: value = "task-3167456" [ 987.306130] env[68569]: _type = "Task" [ 987.306130] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.314722] env[68569]: DEBUG oslo_vmware.api [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167456, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.476035] env[68569]: DEBUG oslo_vmware.api [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167454, 'name': PowerOnVM_Task, 'duration_secs': 0.692988} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.476603] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.476941] env[68569]: INFO nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Took 7.59 seconds to spawn the instance on the hypervisor. [ 987.477309] env[68569]: DEBUG nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.478389] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f46e18e-53ef-49b0-9d0c-ea4d3334391a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.516318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6b8ab25-4204-4c70-94e8-763201295891 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "6b2120d3-2e4b-4d1b-8109-6513b3b320eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.135s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.600151] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d7b1127f-2f7f-402f-958b-aaa88c520ce9 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.747s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.695281] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52206cae-0a80-edfb-55af-e906865f68b0, 'name': SearchDatastore_Task, 'duration_secs': 0.018215} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.695666] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.695969] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.696293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.697140] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.697140] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.697516] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b155665-e05c-47a5-aa4e-14e2aa8fb32a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.711184] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.711286] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.711930] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c25ba3b7-7def-4db7-a43e-659c075b086f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.720509] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 987.720509] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5230c5e7-35f0-06af-42a7-02bdb5f1adb9" [ 987.720509] env[68569]: _type = "Task" [ 987.720509] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.728850] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5230c5e7-35f0-06af-42a7-02bdb5f1adb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.731628] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "0dc5da15-4c10-4754-ac82-a130b933295d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.731876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.732151] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "0dc5da15-4c10-4754-ac82-a130b933295d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.732408] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.732720] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.735020] env[68569]: INFO nova.compute.manager [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Terminating instance [ 987.769495] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4049899c-e4b6-4c1e-a56c-ea967ab34cf6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.777122] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1f4aed-2648-4ecc-a65c-1f7ad207e6fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.812396] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ab14dd-8eb9-4199-8e64-dc4d28020238 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.823096] env[68569]: DEBUG oslo_vmware.api [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167456, 'name': PowerOffVM_Task, 'duration_secs': 0.198705} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.823534] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.823779] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 987.825042] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979b492b-7925-4002-b6ce-f037fc920e1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.829083] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25fa523c-3133-4d15-a82a-1c3ffdff4644 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.840448] env[68569]: DEBUG nova.compute.provider_tree [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.893337] env[68569]: DEBUG nova.network.neutron [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Updated VIF entry in instance network info cache for port 5de82b0c-a595-4eb1-89af-5ab6517061ad. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.893703] env[68569]: DEBUG nova.network.neutron [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Updating instance_info_cache with network_info: [{"id": "5de82b0c-a595-4eb1-89af-5ab6517061ad", "address": "fa:16:3e:0c:62:c2", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de82b0c-a5", "ovs_interfaceid": "5de82b0c-a595-4eb1-89af-5ab6517061ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.002631] env[68569]: INFO nova.compute.manager [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Took 24.82 seconds to build instance. 
[ 988.065670] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.065921] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.066171] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleting the datastore file [datastore1] 398dd3c7-c630-4a29-b204-80f6fb394ce8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.066671] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ccb6fda-4602-4d23-9396-5185ef460603 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.074476] env[68569]: DEBUG oslo_vmware.api [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for the task: (returnval){ [ 988.074476] env[68569]: value = "task-3167458" [ 988.074476] env[68569]: _type = "Task" [ 988.074476] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.086772] env[68569]: DEBUG oslo_vmware.api [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167458, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.231420] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5230c5e7-35f0-06af-42a7-02bdb5f1adb9, 'name': SearchDatastore_Task, 'duration_secs': 0.018746} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.232300] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be7a2d75-0b45-4bdd-8122-c45f1d7f6a48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.237367] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 988.237367] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5233b7ec-07c9-52d5-20d3-9de07a2cc43e" [ 988.237367] env[68569]: _type = "Task" [ 988.237367] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.241289] env[68569]: DEBUG nova.compute.manager [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.241468] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.242215] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59083b88-47ff-4b22-a27e-2268a81afbbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.247965] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5233b7ec-07c9-52d5-20d3-9de07a2cc43e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.252219] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.252467] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1aedfa3-a1a7-4fc4-85ef-9c3930ac0bf4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.324350] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.324662] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.324880] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore2] 0dc5da15-4c10-4754-ac82-a130b933295d {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.325215] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd072a1d-e560-45a8-9769-508c817e1eea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.331888] env[68569]: 
DEBUG oslo_vmware.api [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 988.331888] env[68569]: value = "task-3167460" [ 988.331888] env[68569]: _type = "Task" [ 988.331888] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.341884] env[68569]: DEBUG oslo_vmware.api [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167460, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.345833] env[68569]: DEBUG nova.scheduler.client.report [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.396670] env[68569]: DEBUG oslo_concurrency.lockutils [req-a9119e35-32a9-489e-877b-32f729d2e648 req-0f4f8200-5a0e-405d-adab-401b71f8f98e service nova] Releasing lock "refresh_cache-ae92919c-f2eb-4231-afea-b23269e09a0a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.504638] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ad68078-bf2e-4a62-93ff-3ab0a54d0b8f tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.333s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.584978] env[68569]: DEBUG oslo_vmware.api [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Task: {'id': task-3167458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322535} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.585137] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.585320] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 988.585518] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 988.585662] env[68569]: INFO nova.compute.manager [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Took 1.30 seconds to destroy the instance on the hypervisor. [ 988.585933] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 988.586112] env[68569]: DEBUG nova.compute.manager [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 988.586208] env[68569]: DEBUG nova.network.neutron [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 988.748323] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5233b7ec-07c9-52d5-20d3-9de07a2cc43e, 'name': SearchDatastore_Task, 'duration_secs': 0.015181} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.748588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.748840] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ae92919c-f2eb-4231-afea-b23269e09a0a/ae92919c-f2eb-4231-afea-b23269e09a0a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.749123] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d040c9ea-9c7d-4c3f-9a99-13b04eb47620 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.755558] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 988.755558] env[68569]: value = "task-3167461" [ 988.755558] env[68569]: _type = "Task" [ 988.755558] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.763584] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.842304] env[68569]: DEBUG oslo_vmware.api [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281595} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.842625] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.842840] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 988.843081] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 988.843287] env[68569]: INFO nova.compute.manager [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 988.843552] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 988.843780] env[68569]: DEBUG nova.compute.manager [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 988.843882] env[68569]: DEBUG nova.network.neutron [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 988.852973] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.855862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.697s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.856171] env[68569]: DEBUG nova.objects.instance [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'resources' on Instance uuid adc7f255-be88-4eda-be25-f9ecc9d9bf99 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.878094] env[68569]: INFO nova.scheduler.client.report [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance 7b95aece-35db-4eab-b221-c5eccd749eae [ 989.079523] env[68569]: DEBUG nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Received event network-changed-feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 989.079523] env[68569]: DEBUG nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Refreshing instance network info cache due to event network-changed-feea2419-1c39-4028-811f-c72311dae7a4. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 989.079523] env[68569]: DEBUG oslo_concurrency.lockutils [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] Acquiring lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.079523] env[68569]: DEBUG oslo_concurrency.lockutils [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] Acquired lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.079523] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Refreshing network info cache for port feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.179105] env[68569]: DEBUG nova.compute.manager [req-db090866-61f0-4eeb-b4a6-03d678da045e req-0068bbbb-5916-47bc-bcc1-c08bf9a7187d service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Received event network-vif-deleted-6c2568f4-2500-47df-982b-bf146c8d29d5 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 989.179320] env[68569]: INFO nova.compute.manager [req-db090866-61f0-4eeb-b4a6-03d678da045e req-0068bbbb-5916-47bc-bcc1-c08bf9a7187d service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Neutron deleted interface 6c2568f4-2500-47df-982b-bf146c8d29d5; detaching it from the instance and deleting it from the info cache [ 989.179407] env[68569]: DEBUG nova.network.neutron [req-db090866-61f0-4eeb-b4a6-03d678da045e req-0068bbbb-5916-47bc-bcc1-c08bf9a7187d service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.273391] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167461, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.392877] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3cd6053-9af9-45f9-8066-e3f5083a173b tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7b95aece-35db-4eab-b221-c5eccd749eae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.767s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.469250] env[68569]: DEBUG nova.network.neutron [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.620165] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9186e6dd-c85b-4c94-a0dd-6e552d3a1cf2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.629211] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fc4386-517e-46e9-aae4-02e410eadfb6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.634293] env[68569]: DEBUG nova.network.neutron [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.679542] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f7ae05-b43a-44b0-89a7-2825b3dc34a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.687891] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3062761-b09e-4276-af40-0c9314a643cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.696687] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e11f24-8e3d-4a66-b2b5-ec29f94de450 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.710291] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efff2392-0394-4cab-87ac-3ec745d3d82c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.734224] env[68569]: DEBUG nova.compute.provider_tree [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.760208] env[68569]: DEBUG nova.compute.manager [req-db090866-61f0-4eeb-b4a6-03d678da045e req-0068bbbb-5916-47bc-bcc1-c08bf9a7187d service nova] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Detach interface failed, port_id=6c2568f4-2500-47df-982b-bf146c8d29d5, reason: Instance 0dc5da15-4c10-4754-ac82-a130b933295d could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 989.771537] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.591333} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.771752] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ae92919c-f2eb-4231-afea-b23269e09a0a/ae92919c-f2eb-4231-afea-b23269e09a0a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.771952] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.772565] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01efae5f-1ac2-4769-bbc4-240811aeebb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.779081] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 989.779081] env[68569]: value = "task-3167462" [ 989.779081] env[68569]: _type = "Task" [ 989.779081] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.788262] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.880634] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updated VIF entry in instance network info cache for port feea2419-1c39-4028-811f-c72311dae7a4. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 989.881021] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [{"id": "feea2419-1c39-4028-811f-c72311dae7a4", "address": "fa:16:3e:16:ea:60", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeea2419-1c", "ovs_interfaceid": "feea2419-1c39-4028-811f-c72311dae7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.972580] env[68569]: INFO nova.compute.manager [-] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Took 1.39 seconds to deallocate network for instance. [ 990.139240] env[68569]: INFO nova.compute.manager [-] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Took 1.29 seconds to deallocate network for instance. [ 990.239572] env[68569]: DEBUG nova.scheduler.client.report [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.290045] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071745} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.290341] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.291179] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c96d55-0327-43ca-80d8-e96b9bd7ed94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.315353] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] ae92919c-f2eb-4231-afea-b23269e09a0a/ae92919c-f2eb-4231-afea-b23269e09a0a.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.315653] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31b47a00-f00b-400c-8772-6ef7e510a374 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.337060] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 990.337060] env[68569]: value = "task-3167463" [ 990.337060] env[68569]: _type = "Task" [ 990.337060] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.344576] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167463, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.384477] env[68569]: DEBUG oslo_concurrency.lockutils [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] Releasing lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.384770] env[68569]: DEBUG nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Received event network-vif-deleted-2d1e0d6d-0c37-425c-a138-7bc79f96a3a2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 990.385187] env[68569]: INFO nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Neutron deleted interface 2d1e0d6d-0c37-425c-a138-7bc79f96a3a2; detaching it from the instance and deleting it from the info cache [ 990.385533] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.504439] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.644976] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.745099] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.889s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.747923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.168s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.772676] env[68569]: INFO nova.scheduler.client.report [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted allocations for instance adc7f255-be88-4eda-be25-f9ecc9d9bf99 [ 990.832537] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.832771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.846674] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167463, 'name': ReconfigVM_Task, 'duration_secs': 0.293296} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.849309] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Reconfigured VM instance instance-00000056 to attach disk [datastore1] ae92919c-f2eb-4231-afea-b23269e09a0a/ae92919c-f2eb-4231-afea-b23269e09a0a.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.850349] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2cabc0c-0101-4813-8793-406ecd2bb86e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.857154] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 990.857154] env[68569]: value = "task-3167464" [ 990.857154] env[68569]: _type = "Task" [ 990.857154] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.868365] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167464, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.888347] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c32932c9-6ba3-4f86-9dce-81e2868eb284 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.896941] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb961a6-4cbb-4725-99e2-ac85393bfc00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.930552] env[68569]: DEBUG nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Detach interface failed, port_id=2d1e0d6d-0c37-425c-a138-7bc79f96a3a2, reason: Instance 398dd3c7-c630-4a29-b204-80f6fb394ce8 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 990.930867] env[68569]: DEBUG nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Received event network-changed-e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 990.931071] env[68569]: DEBUG nova.compute.manager [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Refreshing instance network info cache due to event network-changed-e0dc9415-fd47-40f4-aa7d-b89db0dccfae. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 990.931295] env[68569]: DEBUG oslo_concurrency.lockutils [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] Acquiring lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.931436] env[68569]: DEBUG oslo_concurrency.lockutils [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] Acquired lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.931592] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Refreshing network info cache for port e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.004513] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c4d97b-7a10-46d7-ad2c-865e85e10f61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.012772] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbce2d11-1eeb-4db8-85ff-6e7889dddbe5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.045654] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f4e310-ff63-45cb-a71d-c3f5d014ede2 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.053098] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd2f6e2-f7fe-4cc3-a514-08311b67b5cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.066626] env[68569]: DEBUG nova.compute.provider_tree [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.280266] env[68569]: DEBUG oslo_concurrency.lockutils [None req-22161324-dfbe-43ac-9951-950d2c96d5f0 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "adc7f255-be88-4eda-be25-f9ecc9d9bf99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.042s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.335410] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 991.373036] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167464, 'name': Rename_Task, 'duration_secs': 0.146255} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.373036] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.373036] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-333e8a28-3d02-4882-897e-dac94a84aa6d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.376627] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 991.376627] env[68569]: value = "task-3167465" [ 991.376627] env[68569]: _type = "Task" [ 991.376627] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.385145] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167465, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.569933] env[68569]: DEBUG nova.scheduler.client.report [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.768252] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updated VIF entry in instance network info cache for port e0dc9415-fd47-40f4-aa7d-b89db0dccfae. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.768711] env[68569]: DEBUG nova.network.neutron [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updating instance_info_cache with network_info: [{"id": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "address": "fa:16:3e:9b:80:66", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0dc9415-fd", "ovs_interfaceid": "e0dc9415-fd47-40f4-aa7d-b89db0dccfae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.865534] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.890068] env[68569]: DEBUG oslo_vmware.api [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167465, 'name': PowerOnVM_Task, 'duration_secs': 0.475728} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.890215] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.891624] env[68569]: INFO nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Took 7.67 seconds to spawn the instance on the hypervisor. [ 991.891806] env[68569]: DEBUG nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.893035] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f175112-5884-4b45-a163-1c94cc543f76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.075971] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.328s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.076392] env[68569]: INFO nova.compute.manager [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Successfully reverted task state from resize_migrating on failure for instance. 
[ 992.085963] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.282s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.087533] env[68569]: INFO nova.compute.claims [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server [None req-e50a310c-dc63-4f59-b017-a4eb7c74ae19 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 992.090799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 992.091388] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 992.091950] env[68569]: ERROR oslo_messaging.rpc.server [ 992.271218] env[68569]: DEBUG oslo_concurrency.lockutils [req-9e7c799a-0b9b-45f5-a2ab-1a6d8276aa52 req-a48db35d-23b6-4ebe-abf8-9ea0cf50e4f8 service nova] Releasing lock "refresh_cache-1689f1f3-53f2-4c02-a969-e4dae21f14b7" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.418959] env[68569]: INFO nova.compute.manager [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 
tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Took 22.53 seconds to build instance. [ 993.180528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-869f4f53-28e1-442a-b710-a0b639c1ed89 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.304s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.186099] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "1670f03a-94e2-4005-be7e-41aad61a8925" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.186099] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "1670f03a-94e2-4005-be7e-41aad61a8925" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.690155] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 993.693663] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afce5af2-afe1-4f89-8a9c-025803f634d3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.701154] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Suspending the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 993.701816] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2b599b1a-b499-46ae-9c70-69181f5bd6b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.709373] env[68569]: DEBUG oslo_vmware.api [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 993.709373] env[68569]: value = "task-3167466" [ 993.709373] env[68569]: _type = "Task" [ 993.709373] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.721086] env[68569]: DEBUG oslo_vmware.api [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167466, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.948513] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f741b4-9bf0-4a06-8cc3-1ef7e10a612a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.957018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a922e6-ba4a-42b2-af13-edb620684727 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.989517] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b0d520-e3a5-4059-bb6e-32672afdde21 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.996899] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed36fcf1-5741-4337-aae9-d7e3ca11279f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.012489] env[68569]: DEBUG nova.compute.provider_tree [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.040673] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59633-ca21-8204-1547-a6b0df940c8e/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 994.041746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8b16ca-22ef-47d9-8f07-4eadc700f48c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.048307] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59633-ca21-8204-1547-a6b0df940c8e/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 994.048486] env[68569]: ERROR oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59633-ca21-8204-1547-a6b0df940c8e/disk-0.vmdk due to incomplete transfer. 
[ 994.048763] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ddced3eb-6f4a-43c9-9d73-ea6d27f6cc2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.056668] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59633-ca21-8204-1547-a6b0df940c8e/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 994.056871] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Uploaded image f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 994.059523] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 994.059796] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1e59cfea-c526-4f90-b81c-ca20100d7fee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.066234] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 994.066234] env[68569]: value = "task-3167467" [ 994.066234] env[68569]: _type = "Task" [ 994.066234] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.074146] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167467, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.213106] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.222554] env[68569]: DEBUG oslo_vmware.api [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167466, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.518055] env[68569]: DEBUG nova.scheduler.client.report [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.576374] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167467, 'name': Destroy_Task, 'duration_secs': 0.473371} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.576676] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Destroyed the VM [ 994.576871] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 994.577134] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e7c6ca9f-0f04-4beb-aede-6c5cf65cc633 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.582492] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 994.582492] env[68569]: value = "task-3167468" [ 994.582492] env[68569]: _type = "Task" [ 994.582492] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.590211] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167468, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.723331] env[68569]: DEBUG oslo_vmware.api [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167466, 'name': SuspendVM_Task, 'duration_secs': 0.854991} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.723718] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Suspended the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 994.723833] env[68569]: DEBUG nova.compute.manager [None req-7d77890a-8077-4e53-850a-4e15af12ad27 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 994.724602] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd616a3-ed67-424f-9cf4-c79bb0096689 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.024655] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.938s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.024655] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 995.027215] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.105s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.028433] env[68569]: DEBUG nova.objects.instance [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lazy-loading 'resources' on Instance uuid f8b56e81-f3ef-489b-a64c-be687cf99fd1 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.093042] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167468, 'name': RemoveSnapshot_Task, 'duration_secs': 0.370461} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.093321] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 995.093596] env[68569]: DEBUG nova.compute.manager [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.094376] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29976b93-9b45-4627-a7de-5cb933b904f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.531032] env[68569]: DEBUG nova.compute.utils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 995.536597] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 995.536597] env[68569]: DEBUG nova.network.neutron [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.591114] env[68569]: DEBUG nova.policy [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afebab35cda9438781e2b466ce586405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 995.608434] env[68569]: INFO nova.compute.manager [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Shelve offloading [ 995.751969] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b51575-28e5-40ab-9a84-c34be76c6561 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.759565] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6d5aeb62-43c1-473c-b446-35d5677653d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.788037] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d814581-9354-4c9e-81c6-918c1cf9dd3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.794865] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa23589-c6e1-4d72-a16d-6aaa167962c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.809053] env[68569]: DEBUG nova.compute.provider_tree [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.854866] env[68569]: DEBUG nova.network.neutron [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Successfully created port: ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.037489] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 996.113081] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.113422] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70ed04c7-762e-4437-b529-81679e4abfb9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.121921] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 996.121921] env[68569]: value = "task-3167469" [ 996.121921] env[68569]: _type = "Task" [ 996.121921] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.130313] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167469, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.312782] env[68569]: DEBUG nova.scheduler.client.report [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.352027] env[68569]: DEBUG nova.compute.manager [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.352953] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0152d172-25aa-4516-aa46-3d5b7a89c803 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.633422] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 996.633582] env[68569]: DEBUG nova.compute.manager [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.634412] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce6f48b-e68f-4219-bd45-f9a2ff43a3c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.640513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.640675] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.640880] env[68569]: DEBUG nova.network.neutron [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Building network info cache for instance {{(pid=68569) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 996.817591] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.790s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.819996] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.089s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.820190] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.820354] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 996.820701] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.921s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.820874] env[68569]: DEBUG nova.objects.instance [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lazy-loading 'resources' on Instance uuid 53cc8dbd-c163-403a-9286-e1f8ad939f94 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.822793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b9baf0-0d3b-48da-aa19-f2a46eafc814 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.830835] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8bd662-3b7a-4f7f-9b57-d15513507196 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.845929] env[68569]: INFO nova.scheduler.client.report [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted allocations for instance f8b56e81-f3ef-489b-a64c-be687cf99fd1 [ 996.850644] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f747594-f550-4eca-a9de-db41d591d30f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.858105] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-607e93d1-e1fd-4321-a2b9-cad9f6d06761 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.863666] env[68569]: INFO nova.compute.manager [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] instance snapshotting [ 996.863856] env[68569]: WARNING nova.compute.manager [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 996.890870] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953cc755-8d06-4e8c-a5f7-d99882aed9f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.894510] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178717MB free_disk=128GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 996.894655] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.911100] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a263758-989a-43f5-81b9-e9b2382e3a4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.048949] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 997.070993] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 997.071266] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.071419] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.071915] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.071915] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.071915] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 997.072088] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 997.072224] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 997.072391] 
env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 997.072542] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.072984] env[68569]: DEBUG nova.virt.hardware [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.073546] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9666b65-7913-4a97-be80-47fc5d90a7da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.081304] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50fe20d-a09d-42d2-b639-696ffc34cdf2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.282664] env[68569]: DEBUG nova.compute.manager [req-2e148362-37c4-4039-8aba-a7c0afb8fe5d req-47dc3a1d-f301-4489-97d6-dadd2daa26a2 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Received event network-vif-plugged-ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 997.283026] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e148362-37c4-4039-8aba-a7c0afb8fe5d req-47dc3a1d-f301-4489-97d6-dadd2daa26a2 service nova] Acquiring lock "281d7077-391a-4cce-9d31-af41568a2b7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.283119] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e148362-37c4-4039-8aba-a7c0afb8fe5d req-47dc3a1d-f301-4489-97d6-dadd2daa26a2 service nova] Lock "281d7077-391a-4cce-9d31-af41568a2b7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.283300] env[68569]: DEBUG oslo_concurrency.lockutils [req-2e148362-37c4-4039-8aba-a7c0afb8fe5d req-47dc3a1d-f301-4489-97d6-dadd2daa26a2 service nova] Lock "281d7077-391a-4cce-9d31-af41568a2b7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.283424] env[68569]: DEBUG nova.compute.manager [req-2e148362-37c4-4039-8aba-a7c0afb8fe5d req-47dc3a1d-f301-4489-97d6-dadd2daa26a2 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] No waiting events found dispatching network-vif-plugged-ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 997.283586] env[68569]: WARNING 
nova.compute.manager [req-2e148362-37c4-4039-8aba-a7c0afb8fe5d req-47dc3a1d-f301-4489-97d6-dadd2daa26a2 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Received unexpected event network-vif-plugged-ceb81daf-a1b8-41e6-a68d-583d6349ab13 for instance with vm_state building and task_state spawning. [ 997.348890] env[68569]: DEBUG nova.network.neutron [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Successfully updated port: ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.352744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a763b26c-71c9-4c1f-90a3-52d50040297e tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "f8b56e81-f3ef-489b-a64c-be687cf99fd1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.013s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.359628] env[68569]: DEBUG nova.network.neutron [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.423223] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 997.423510] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6d935666-8242-424c-ac8a-15fbaed8a035 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.435857] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] 
Waiting for the task: (returnval){ [ 997.435857] env[68569]: value = "task-3167470" [ 997.435857] env[68569]: _type = "Task" [ 997.435857] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.446846] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167470, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.518060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127f8bf0-b6c1-4fd0-bf06-33adf8072bc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.525599] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ca7b42-f161-4225-b920-e8f086948a2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.555681] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5905c1-8357-4a22-8e96-d0475f25e191 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.562730] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0f2719-2364-40b5-9d80-445b01015294 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.575521] env[68569]: DEBUG nova.compute.provider_tree [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.853668] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.853919] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.854091] env[68569]: DEBUG nova.network.neutron [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.861503] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.948050] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167470, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.069302] env[68569]: DEBUG nova.compute.manager [req-f77355b6-6798-4572-b39f-752152f4a83a req-8457d42f-b2c9-470c-aab9-3cbbeceef4d4 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-vif-unplugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 998.069577] env[68569]: DEBUG oslo_concurrency.lockutils [req-f77355b6-6798-4572-b39f-752152f4a83a req-8457d42f-b2c9-470c-aab9-3cbbeceef4d4 service nova] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.069872] env[68569]: DEBUG oslo_concurrency.lockutils [req-f77355b6-6798-4572-b39f-752152f4a83a req-8457d42f-b2c9-470c-aab9-3cbbeceef4d4 service nova] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.070915] env[68569]: DEBUG oslo_concurrency.lockutils [req-f77355b6-6798-4572-b39f-752152f4a83a req-8457d42f-b2c9-470c-aab9-3cbbeceef4d4 service nova] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.071160] env[68569]: DEBUG nova.compute.manager [req-f77355b6-6798-4572-b39f-752152f4a83a req-8457d42f-b2c9-470c-aab9-3cbbeceef4d4 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] No waiting events found dispatching network-vif-unplugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.071370] env[68569]: WARNING nova.compute.manager [req-f77355b6-6798-4572-b39f-752152f4a83a req-8457d42f-b2c9-470c-aab9-3cbbeceef4d4 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received unexpected event network-vif-unplugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 for instance with vm_state shelved and task_state shelving_offloading. 
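The lockutils traffic above around the "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" lock traces a recurring pattern in this log: the external-event handler serializes on a per-instance "<uuid>-events" lock, pops any registered waiter for the incoming event, and emits the "Received unexpected event" WARNING when nothing was waiting (here the instance is already shelved/shelving_offloading). The following is a minimal illustrative sketch of that pattern only, not Nova's actual implementation; the _waiters table and pop_instance_event helper are hypothetical, while the oslo_concurrency.lockutils.lock() context manager is the real primitive producing the Acquiring/acquired/released lines above.

    # Illustrative sketch -- assumes a hypothetical in-memory waiter registry.
    from oslo_concurrency import lockutils

    _waiters = {}  # hypothetical: event name -> callback registered by a waiter

    def pop_instance_event(instance_uuid, event_name):
        lock_name = '%s-events' % instance_uuid
        # Produces the "Acquiring lock ... -events" / "acquired" / "released"
        # DEBUG lines seen above when do_log is enabled.
        with lockutils.lock(lock_name):
            callback = _waiters.pop(event_name, None)
        if callback is None:
            # Corresponds to the WARNING: "Received unexpected event ..."
            print('Received unexpected event %s' % event_name)
        else:
            callback(event_name)

    pop_instance_event('c0ea0ef8-93c2-416a-8caa-a51f7a39627e',
                       'network-vif-unplugged-d13b7716-5b05-4896-9da9-e3674d55a3a0')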
[ 998.078105] env[68569]: DEBUG nova.scheduler.client.report [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.090087] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.091277] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbce4f52-2202-4587-a025-29ee6358b232 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.100256] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.100482] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e820e68-01f5-42ae-8826-9c2ff86fa439 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.387209] env[68569]: DEBUG nova.network.neutron [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.449174] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167470, 'name': CreateSnapshot_Task, 'duration_secs': 0.629039} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.450053] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 998.450219] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ceafa1c-cb0a-4470-80ee-c599004b1e39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.549094] env[68569]: DEBUG nova.network.neutron [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updating instance_info_cache with network_info: [{"id": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "address": "fa:16:3e:90:51:17", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb81daf-a1", "ovs_interfaceid": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.582813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.585037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.290s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.585236] env[68569]: DEBUG nova.objects.instance [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) 
apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 998.601585] env[68569]: INFO nova.scheduler.client.report [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Deleted allocations for instance 53cc8dbd-c163-403a-9286-e1f8ad939f94 [ 998.916363] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.916720] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.916909] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleting the datastore file [datastore1] c0ea0ef8-93c2-416a-8caa-a51f7a39627e {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.917226] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a362a6c0-979d-4ee3-82ee-e229393b1b84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.924182] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 998.924182] env[68569]: value = "task-3167472" [ 998.924182] env[68569]: _type = "Task" [ 998.924182] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.931535] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167472, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.969556] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 998.969914] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8e73d108-d484-46e6-b8e7-52aeb42e45d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.978146] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 998.978146] env[68569]: value = "task-3167473" [ 998.978146] env[68569]: _type = "Task" [ 998.978146] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.986623] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167473, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.051387] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.051744] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Instance network_info: |[{"id": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "address": "fa:16:3e:90:51:17", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb81daf-a1", "ovs_interfaceid": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.052203] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:51:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ceb81daf-a1b8-41e6-a68d-583d6349ab13', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.059591] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.059815] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.060087] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0225da9-69bb-4b8a-a7a1-5c275d2fda81 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.080970] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.080970] env[68569]: value = "task-3167474" [ 999.080970] env[68569]: _type = "Task" [ 999.080970] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.091781] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167474, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.110665] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8753d2b7-7180-4a32-8d71-3cb85a4ad08c tempest-ServersWithSpecificFlavorTestJSON-960738793 tempest-ServersWithSpecificFlavorTestJSON-960738793-project-member] Lock "53cc8dbd-c163-403a-9286-e1f8ad939f94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.938s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.311297] env[68569]: DEBUG nova.compute.manager [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Received event network-changed-ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 999.311513] env[68569]: DEBUG nova.compute.manager [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Refreshing instance network info cache due to event network-changed-ceb81daf-a1b8-41e6-a68d-583d6349ab13. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 999.311745] env[68569]: DEBUG oslo_concurrency.lockutils [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] Acquiring lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.311893] env[68569]: DEBUG oslo_concurrency.lockutils [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] Acquired lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.312070] env[68569]: DEBUG nova.network.neutron [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Refreshing network info cache for port ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.434922] env[68569]: DEBUG oslo_vmware.api [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152055} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.435412] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.436022] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.436138] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.462585] env[68569]: INFO nova.scheduler.client.report [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted allocations for instance c0ea0ef8-93c2-416a-8caa-a51f7a39627e [ 999.488880] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167473, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.590335] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167474, 'name': CreateVM_Task, 'duration_secs': 0.406055} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.590505] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.591225] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.591414] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.591707] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 999.591959] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60938b7c-d338-4671-b8b8-d244275ff959 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.594327] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7eb710f5-b30c-4205-bc73-4bf826f8ac86 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.595337] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.091s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.595553] env[68569]: DEBUG nova.objects.instance [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lazy-loading 'resources' on Instance uuid 398dd3c7-c630-4a29-b204-80f6fb394ce8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.599658] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 999.599658] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5252ebec-76bb-9830-f2a3-39242d3433bc" [ 999.599658] env[68569]: _type = "Task" [ 999.599658] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.608695] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5252ebec-76bb-9830-f2a3-39242d3433bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.967327] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.990898] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167473, 'name': CloneVM_Task, 'duration_secs': 0.876537} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.991179] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Created linked-clone VM from snapshot [ 999.991911] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230cf6e5-af14-4a61-9750-2cf7602b928c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.999217] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Uploading image e0d58c70-e9b2-4972-b9da-c050b529130a {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1000.015939] env[68569]: DEBUG nova.network.neutron [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updated VIF entry in instance network info cache for port ceb81daf-a1b8-41e6-a68d-583d6349ab13. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.016336] env[68569]: DEBUG nova.network.neutron [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updating instance_info_cache with network_info: [{"id": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "address": "fa:16:3e:90:51:17", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb81daf-a1", "ovs_interfaceid": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.019127] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1000.019127] env[68569]: value = "vm-633673" [ 1000.019127] env[68569]: _type = "VirtualMachine" [ 1000.019127] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1000.019417] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-73fd9730-8081-4d46-9d2e-3fef735fb077 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.026145] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease: (returnval){ [ 1000.026145] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fff2-76f3-b9b6-5d01-2827abee88be" [ 1000.026145] env[68569]: _type = "HttpNfcLease" [ 1000.026145] env[68569]: } obtained for exporting VM: (result){ [ 1000.026145] env[68569]: value = "vm-633673" [ 1000.026145] env[68569]: _type = "VirtualMachine" [ 1000.026145] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1000.026421] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the lease: (returnval){ [ 1000.026421] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fff2-76f3-b9b6-5d01-2827abee88be" [ 1000.026421] env[68569]: _type = "HttpNfcLease" [ 1000.026421] env[68569]: } to be ready. 
{{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1000.033572] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.033572] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fff2-76f3-b9b6-5d01-2827abee88be" [ 1000.033572] env[68569]: _type = "HttpNfcLease" [ 1000.033572] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1000.095242] env[68569]: DEBUG nova.compute.manager [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-changed-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1000.095499] env[68569]: DEBUG nova.compute.manager [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Refreshing instance network info cache due to event network-changed-d13b7716-5b05-4896-9da9-e3674d55a3a0. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1000.095719] env[68569]: DEBUG oslo_concurrency.lockutils [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.095866] env[68569]: DEBUG oslo_concurrency.lockutils [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.096138] env[68569]: DEBUG nova.network.neutron [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Refreshing network info cache for port d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.111804] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5252ebec-76bb-9830-f2a3-39242d3433bc, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.112098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.112723] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.112723] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.112723] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.112929] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.113964] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-947cc3bc-b205-40cb-b8e8-0e3cdacb294b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.122384] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.122564] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.123259] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d570ece5-dc85-45b1-ae21-48e0b5994d7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.128384] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1000.128384] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aaaad2-23fe-8014-0096-8efb413630a4" [ 1000.128384] env[68569]: _type = "Task" [ 1000.128384] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.138464] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aaaad2-23fe-8014-0096-8efb413630a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.267308] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34084312-25dd-4cd5-ba48-0b33f76ce30e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.275078] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09263021-7c17-4ac2-8ba5-66392f6823e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.306231] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60335146-32f3-4ac9-a835-03db97894299 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.313459] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99835b11-7b9f-4e87-b72a-f077fafe5cd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.335523] env[68569]: DEBUG nova.compute.provider_tree [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.522599] env[68569]: DEBUG oslo_concurrency.lockutils [req-cd94f4d1-a535-41a9-9a4b-4fde446a3f46 req-d8698427-063b-457f-9acc-798b7d10f547 service nova] Releasing lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.536499] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.536499] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fff2-76f3-b9b6-5d01-2827abee88be" [ 1000.536499] env[68569]: _type = "HttpNfcLease" [ 1000.536499] env[68569]: } is ready. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1000.537939] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1000.537939] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5215fff2-76f3-b9b6-5d01-2827abee88be" [ 1000.537939] env[68569]: _type = "HttpNfcLease" [ 1000.537939] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1000.537939] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd17dc01-3a26-4bc2-9956-0c571a0250dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.545742] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52027dc1-f649-2e23-8c21-f91e27d462ea/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1000.546089] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52027dc1-f649-2e23-8c21-f91e27d462ea/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1000.638813] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aaaad2-23fe-8014-0096-8efb413630a4, 'name': SearchDatastore_Task, 'duration_secs': 0.011143} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.642088] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17e3af9b-129f-47e9-ab14-baf7235b23d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.647258] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1000.647258] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5289396d-a7b0-821d-7fe7-492683cd92ce" [ 1000.647258] env[68569]: _type = "Task" [ 1000.647258] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.652810] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-10256da0-41b9-4d70-bea9-f1fdf81989c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.661535] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5289396d-a7b0-821d-7fe7-492683cd92ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.840917] env[68569]: DEBUG nova.scheduler.client.report [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.940144] env[68569]: DEBUG nova.network.neutron [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updated VIF entry in instance network info cache for port d13b7716-5b05-4896-9da9-e3674d55a3a0. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.940510] env[68569]: DEBUG nova.network.neutron [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": null, "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd13b7716-5b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.160711] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5289396d-a7b0-821d-7fe7-492683cd92ce, 'name': SearchDatastore_Task, 'duration_secs': 0.011589} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.161238] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.161587] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 281d7077-391a-4cce-9d31-af41568a2b7c/281d7077-391a-4cce-9d31-af41568a2b7c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.161851] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff18fcd0-24cc-4da4-a4db-d85a6ab4444f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.169532] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1001.169532] env[68569]: value = "task-3167476" [ 1001.169532] env[68569]: _type = "Task" [ 1001.169532] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.178207] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.351276] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.354265] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.709s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.354265] env[68569]: DEBUG nova.objects.instance [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'resources' on Instance uuid 0dc5da15-4c10-4754-ac82-a130b933295d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.376693] env[68569]: INFO nova.scheduler.client.report [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Deleted allocations for instance 398dd3c7-c630-4a29-b204-80f6fb394ce8 [ 1001.443690] env[68569]: DEBUG oslo_concurrency.lockutils [req-aaabe85b-6b99-44f1-b4d7-0ba1fe968c76 req-363e9b6f-8a82-4a65-b2a6-6b737b55b04f service nova] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.687720] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487127} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.688067] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 281d7077-391a-4cce-9d31-af41568a2b7c/281d7077-391a-4cce-9d31-af41568a2b7c.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.688412] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.688741] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94ddb066-d464-4cf3-b718-fb2739a48f3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.695893] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1001.695893] env[68569]: value = "task-3167477" [ 1001.695893] env[68569]: _type = "Task" [ 1001.695893] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.713403] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167477, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.895351] env[68569]: DEBUG oslo_concurrency.lockutils [None req-74a45e19-9938-4e3b-8431-1ddc048b4dbd tempest-ServersAdminTestJSON-1415471422 tempest-ServersAdminTestJSON-1415471422-project-member] Lock "398dd3c7-c630-4a29-b204-80f6fb394ce8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.113s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.089149] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace29421-f659-4bd0-818e-2a7637520463 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.097317] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ea6591-8aaf-4c2b-a6a1-a01edcc92fc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.132546] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16c61ce-a435-42a2-94cd-99af41d4730d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.143187] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7637898f-3d2d-4af4-989a-3951153a48b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.159572] env[68569]: DEBUG nova.compute.provider_tree [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.206293] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065208} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.206657] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.207360] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde779a2-977a-4fa1-b249-0aaa9847d4e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.210193] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.230282] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 281d7077-391a-4cce-9d31-af41568a2b7c/281d7077-391a-4cce-9d31-af41568a2b7c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.231027] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c68abab-3dde-4adc-a523-0ddce2dabe60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.251730] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1002.251730] env[68569]: value = "task-3167478" [ 1002.251730] env[68569]: _type = "Task" [ 1002.251730] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.259824] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167478, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.665533] env[68569]: DEBUG nova.scheduler.client.report [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.763536] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167478, 'name': ReconfigVM_Task, 'duration_secs': 0.379906} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.763898] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 281d7077-391a-4cce-9d31-af41568a2b7c/281d7077-391a-4cce-9d31-af41568a2b7c.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.764965] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c170e6e3-e4e1-45e3-812e-3f7223bb0394 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.771425] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1002.771425] env[68569]: value = "task-3167479" [ 1002.771425] env[68569]: _type = "Task" [ 1002.771425] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.779639] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167479, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.171399] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.175213] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.309s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.179351] env[68569]: INFO nova.compute.claims [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.202891] env[68569]: INFO nova.scheduler.client.report [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted allocations for instance 0dc5da15-4c10-4754-ac82-a130b933295d [ 1003.285796] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167479, 'name': Rename_Task, 'duration_secs': 0.194439} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.285796] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.285796] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fac0dacd-9d2b-4095-863a-edc1d05dc7fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.291199] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1003.291199] env[68569]: value = "task-3167480" [ 1003.291199] env[68569]: _type = "Task" [ 1003.291199] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.303269] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167480, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.711027] env[68569]: DEBUG oslo_concurrency.lockutils [None req-81a114d0-1931-48ce-b7b1-a6e2607cd56f tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "0dc5da15-4c10-4754-ac82-a130b933295d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.979s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.807699] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167480, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.309748] env[68569]: DEBUG oslo_vmware.api [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167480, 'name': PowerOnVM_Task, 'duration_secs': 0.580383} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.310125] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.310341] env[68569]: INFO nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Took 7.26 seconds to spawn the instance on the hypervisor. 
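The records above trace the tail of this spawn: CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and finally PowerOnVM_Task, each driven by the same wait_for_task/_poll_task loop that reports "progress is 0% ... 89% ... completed successfully". Below is a minimal sketch of that polling pattern only; `TaskInfo` and `fetch_task_info` are illustrative stand-ins, not the oslo.vmware API.

```python
import time
from dataclasses import dataclass


# Illustrative stand-in for the task info returned by vCenter; not the
# actual oslo.vmware objects.
@dataclass
class TaskInfo:
    state: str      # "running", "success" or "error"
    progress: int   # percent complete
    error: str = ""


def wait_for_task(fetch_task_info, poll_interval=0.5, log=print):
    """Poll a task until it leaves the 'running' state, logging progress.

    `fetch_task_info` is any callable returning a TaskInfo; in the log
    above the equivalent data comes from the vCenter API.
    """
    while True:
        info = fetch_task_info()
        if info.state == "running":
            log(f"progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            log("completed successfully")
            return info
        raise RuntimeError(f"task failed: {info.error}")


if __name__ == "__main__":
    # Simulated task that finishes after three polls.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 89),
                   TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0.0)
```

The real loop also re-reads the task via the PropertyCollector and handles error states with richer exceptions; this sketch only shows why the log emits a progress line per poll and a single completion line per task.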
[ 1004.310520] env[68569]: DEBUG nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.312119] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6f4961-247f-4720-a56f-891520ad6003 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.432925] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f04bd6-3f4c-4a63-ae90-34884f2ed06c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.443917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7625518f-4e27-4518-a548-46bbac2f5513 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.478928] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9be3aa-b880-432d-8586-81775907a68e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.487646] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137b366d-b150-4797-a5e7-47bc31c79bdb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.501643] env[68569]: DEBUG nova.compute.provider_tree [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.842136] env[68569]: INFO nova.compute.manager [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Took 26.05 seconds to build instance. 
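The repeated "Inventory has not changed in ProviderTree" / "Inventory has not changed for provider ... based on inventory data" records above come from a cache comparison that short-circuits the call to placement when the reported inventory is identical to the cached copy. A minimal sketch of that decision is below, reusing the inventory blob from the log; `maybe_update_inventory` and `push_to_placement` are hypothetical names, not the Nova ProviderTree or report-client implementation.

```python
def inventory_changed(cached, new):
    """Return True when the new inventory differs from the cached copy."""
    return cached != new


def maybe_update_inventory(cached_inventories, provider_uuid, new_inventory,
                           push_to_placement, log=print):
    """Only call placement when the inventory actually changed."""
    cached = cached_inventories.get(provider_uuid)
    if cached is not None and not inventory_changed(cached, new_inventory):
        log(f"Inventory has not changed for provider {provider_uuid}")
        return False
    cached_inventories[provider_uuid] = new_inventory
    push_to_placement(provider_uuid, new_inventory)
    return True


if __name__ == "__main__":
    # Inventory data as reported in the log above.
    inv = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 128,
                    "step_size": 1, "allocation_ratio": 1.0},
    }
    cache = {}
    maybe_update_inventory(cache, "a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6", inv,
                           push_to_placement=lambda uuid, data: None)
    # A second call with identical data takes the "has not changed" path.
    maybe_update_inventory(cache, "a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6", dict(inv),
                           push_to_placement=lambda uuid, data: None)
```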
[ 1005.004576] env[68569]: DEBUG nova.scheduler.client.report [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.344439] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b5f5cc1e-6e97-4abe-8293-d3c9988b760d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "281d7077-391a-4cce-9d31-af41568a2b7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.561s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.514854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.517959] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1005.523035] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.310s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.524751] env[68569]: INFO nova.compute.claims [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.030625] env[68569]: DEBUG nova.compute.utils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1006.035885] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1006.036096] env[68569]: DEBUG nova.network.neutron [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.118809] env[68569]: DEBUG nova.policy [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1006.130912] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.130912] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.232818] env[68569]: DEBUG nova.compute.manager [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1006.511148] env[68569]: DEBUG nova.network.neutron [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Successfully created port: 9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.536093] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1006.635491] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1006.754007] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.830964] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe84b7-576f-450d-849b-d895b4db57e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.843022] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfafba9-a6a0-4c68-b82a-d16055b90c74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.885992] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ebec55-bd01-41bc-8ac5-b13c8495c37e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.894835] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbc107e-fefc-4786-81a2-d3da668ff7f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.908550] env[68569]: DEBUG nova.compute.provider_tree [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.995141] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a7145443-aacb-4d9e-8e39-3741d0630849" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.995510] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a7145443-aacb-4d9e-8e39-3741d0630849" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.163245] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.412397] env[68569]: DEBUG nova.scheduler.client.report [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory 
has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.500642] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1007.551746] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1007.578843] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1007.579097] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.579354] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1007.579471] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.579617] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1007.579762] env[68569]: DEBUG nova.virt.hardware [None 
req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1007.579965] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1007.580147] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1007.580331] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1007.580495] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1007.580667] env[68569]: DEBUG nova.virt.hardware [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1007.582122] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca630352-eba9-4727-925f-23c7e5b3eceb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.591332] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc8b068-a01c-42f7-a803-bb8e8457dba0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.917908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.918261] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1007.920884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 11.026s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.021896] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.070836] env[68569]: DEBUG nova.compute.manager [req-74b0a754-d125-4881-9732-f53193a407eb req-f88297f1-5213-4120-9c93-b103cf25f6bd service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Received event network-vif-plugged-9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1008.071063] env[68569]: DEBUG oslo_concurrency.lockutils [req-74b0a754-d125-4881-9732-f53193a407eb req-f88297f1-5213-4120-9c93-b103cf25f6bd service nova] Acquiring lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.071288] env[68569]: DEBUG oslo_concurrency.lockutils [req-74b0a754-d125-4881-9732-f53193a407eb req-f88297f1-5213-4120-9c93-b103cf25f6bd service nova] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.071446] env[68569]: DEBUG oslo_concurrency.lockutils [req-74b0a754-d125-4881-9732-f53193a407eb req-f88297f1-5213-4120-9c93-b103cf25f6bd service nova] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.071615] env[68569]: DEBUG nova.compute.manager [req-74b0a754-d125-4881-9732-f53193a407eb req-f88297f1-5213-4120-9c93-b103cf25f6bd service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] No waiting events found dispatching network-vif-plugged-9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1008.071796] env[68569]: WARNING nova.compute.manager [req-74b0a754-d125-4881-9732-f53193a407eb req-f88297f1-5213-4120-9c93-b103cf25f6bd service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Received unexpected event network-vif-plugged-9c464830-8fd8-4131-b5a0-3b163477168a for instance with vm_state building and task_state spawning. 
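The req-74b0a754 records above show the external instance-event handshake: the spawning thread may register a wait for network-vif-plugged-<port>, and when Neutron's notification arrives the handler pops the matching event and signals the waiter; here no waiter was registered yet, so the event is logged as unexpected and dropped. A toy version of that handshake is sketched below with threading.Event; this is purely illustrative and much simpler than nova.compute.manager.InstanceEvents.

```python
import threading


class InstanceEvents:
    """Toy expected-event registry mirroring the handshake in the log above.

    prepare_for_event() is called by the thread that will wait (e.g. while
    plugging VIFs during spawn); pop_event() is called by the external-event
    handler when the network-vif-plugged notification arrives.
    """

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def pop_event(self, instance_uuid, event_name, log=print):
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            # Mirrors the WARNING path: the notification beat the waiter.
            log(f"No waiting events found dispatching {event_name}; "
                "received unexpected event")
            return False
        ev.set()
        return True


if __name__ == "__main__":
    events = InstanceEvents()
    # No waiter registered: the unexpected-event path from the log.
    events.pop_event("705bbc03-30ac-4d5b-a3f0-6505171a69fc",
                     "network-vif-plugged-9c464830-8fd8-4131-b5a0-3b163477168a")
    # With a waiter registered, the event is signalled instead.
    waiter = events.prepare_for_event("705bbc03", "network-vif-plugged-demo")
    events.pop_event("705bbc03", "network-vif-plugged-demo")
    assert waiter.is_set()
```

In the log this path is harmless: the port was plugged before the spawn thread started waiting, so the warning simply documents the race rather than an error.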
[ 1008.156669] env[68569]: DEBUG nova.network.neutron [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Successfully updated port: 9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.280035] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.280233] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.423938] env[68569]: DEBUG nova.compute.utils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1008.430960] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1008.431147] env[68569]: DEBUG nova.network.neutron [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1008.482134] env[68569]: DEBUG nova.policy [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1008.632937] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.633240] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.659904] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-705bbc03-30ac-4d5b-a3f0-6505171a69fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.660101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-705bbc03-30ac-4d5b-a3f0-6505171a69fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.660248] env[68569]: DEBUG nova.network.neutron [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.776432] env[68569]: DEBUG nova.network.neutron [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Successfully created port: d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1008.785708] env[68569]: DEBUG nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1008.934926] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1008.939501] env[68569]: INFO nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updating resource usage from migration 08357d8f-bef1-4555-884e-6cf2c811f256 [ 1008.970033] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970211] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fd803a5e-8dbd-449e-b45d-1e6410a286e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970359] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6824efd5-427b-420d-83d5-a1d5acd94bf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970539] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 61aa0997-ffa6-4551-bdaa-132026e240f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970593] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970806] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance f5f8b054-7ee4-40f5-84de-1cee02949cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970806] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.970976] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 1689f1f3-53f2-4c02-a969-e4dae21f14b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.971075] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ae92919c-f2eb-4231-afea-b23269e09a0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.971138] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 705bbc03-30ac-4d5b-a3f0-6505171a69fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.971244] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 1670f03a-94e2-4005-be7e-41aad61a8925 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1008.971395] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1008.971514] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Migration 08357d8f-bef1-4555-884e-6cf2c811f256 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1008.971623] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 281d7077-391a-4cce-9d31-af41568a2b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.137023] env[68569]: DEBUG nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1009.225735] env[68569]: DEBUG nova.network.neutron [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.308664] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.475242] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.500029] env[68569]: DEBUG nova.network.neutron [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Updating instance_info_cache with network_info: [{"id": "9c464830-8fd8-4131-b5a0-3b163477168a", "address": "fa:16:3e:b4:79:ef", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c464830-8f", "ovs_interfaceid": "9c464830-8fd8-4131-b5a0-3b163477168a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.659489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.954611] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1009.979036] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a7145443-aacb-4d9e-8e39-3741d0630849 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.986266] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.986627] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.986874] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.987147] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.987356] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.987568] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.987977] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.988221] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.988472] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.988705] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.988966] env[68569]: DEBUG nova.virt.hardware [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.989933] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d88740-0f78-4e9a-ae0a-54c5328a8a67 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.998883] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e30884c-9a05-47e7-91c0-8f24ade39d04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.003323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-705bbc03-30ac-4d5b-a3f0-6505171a69fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.003739] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Instance network_info: |[{"id": "9c464830-8fd8-4131-b5a0-3b163477168a", "address": "fa:16:3e:b4:79:ef", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c464830-8f", "ovs_interfaceid": "9c464830-8fd8-4131-b5a0-3b163477168a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1010.004187] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:79:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c464830-8fd8-4131-b5a0-3b163477168a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.012875] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.014279] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.014580] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56588c71-2d3d-46b7-981e-9d955625aa62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.049036] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.049036] env[68569]: value = "task-3167481" [ 1010.049036] env[68569]: _type = "Task" [ 1010.049036] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.055361] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167481, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.104624] env[68569]: DEBUG nova.compute.manager [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Received event network-changed-9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1010.104893] env[68569]: DEBUG nova.compute.manager [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Refreshing instance network info cache due to event network-changed-9c464830-8fd8-4131-b5a0-3b163477168a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1010.105146] env[68569]: DEBUG oslo_concurrency.lockutils [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] Acquiring lock "refresh_cache-705bbc03-30ac-4d5b-a3f0-6505171a69fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.105290] env[68569]: DEBUG oslo_concurrency.lockutils [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] Acquired lock "refresh_cache-705bbc03-30ac-4d5b-a3f0-6505171a69fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.105640] env[68569]: DEBUG nova.network.neutron [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Refreshing network info cache for port 9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.212847] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52027dc1-f649-2e23-8c21-f91e27d462ea/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1010.213875] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b5199d-4262-44aa-ae27-ec26606341da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.220502] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52027dc1-f649-2e23-8c21-f91e27d462ea/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1010.220685] env[68569]: ERROR oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52027dc1-f649-2e23-8c21-f91e27d462ea/disk-0.vmdk due to incomplete transfer. [ 1010.220913] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a65b4c7a-30e0-48ef-b68d-5f5d3ef0916c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.228554] env[68569]: DEBUG oslo_vmware.rw_handles [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52027dc1-f649-2e23-8c21-f91e27d462ea/disk-0.vmdk. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1010.228761] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Uploaded image e0d58c70-e9b2-4972-b9da-c050b529130a to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1010.230969] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1010.231235] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b4b78572-188a-4564-bc8a-7794e822435d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.237972] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1010.237972] env[68569]: value = "task-3167482" [ 1010.237972] env[68569]: _type = "Task" [ 1010.237972] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.246378] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167482, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.494094] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7b10cfb4-dc0a-4311-a24f-7a25869ef594 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1010.536324] env[68569]: DEBUG nova.network.neutron [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Successfully updated port: d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.558590] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167481, 'name': CreateVM_Task, 'duration_secs': 0.338514} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.558891] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1010.559720] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.560017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.560481] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1010.560838] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-935e024f-4c19-4eeb-bdce-0036fb579d8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.566466] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1010.566466] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f260bb-1e18-7b42-85de-15c593cf8ee6" [ 1010.566466] env[68569]: _type = "Task" [ 1010.566466] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.575419] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f260bb-1e18-7b42-85de-15c593cf8ee6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.750841] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167482, 'name': Destroy_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.917054] env[68569]: DEBUG nova.network.neutron [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Updated VIF entry in instance network info cache for port 9c464830-8fd8-4131-b5a0-3b163477168a. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.917480] env[68569]: DEBUG nova.network.neutron [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Updating instance_info_cache with network_info: [{"id": "9c464830-8fd8-4131-b5a0-3b163477168a", "address": "fa:16:3e:b4:79:ef", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c464830-8f", "ovs_interfaceid": "9c464830-8fd8-4131-b5a0-3b163477168a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.998308] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1010.998574] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1010.998746] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1011.045078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-1670f03a-94e2-4005-be7e-41aad61a8925" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.045078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-1670f03a-94e2-4005-be7e-41aad61a8925" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.045078] env[68569]: DEBUG nova.network.neutron [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.079839] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f260bb-1e18-7b42-85de-15c593cf8ee6, 'name': SearchDatastore_Task, 'duration_secs': 0.00986} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.080171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.080932] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.080932] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.080932] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.081182] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.081233] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e3814ef-9cdf-4b68-bfda-078a64115280 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.093984] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.093984] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.093984] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8a38d15-170f-42e2-aaea-cae71d30d029 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.100225] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1011.100225] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c11a08-2249-30f6-0b15-9e34c16e8bc9" [ 1011.100225] env[68569]: _type = "Task" [ 1011.100225] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.109464] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c11a08-2249-30f6-0b15-9e34c16e8bc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.250660] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167482, 'name': Destroy_Task, 'duration_secs': 0.870567} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.250957] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Destroyed the VM [ 1011.251347] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1011.251539] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-91223d08-b014-448b-8feb-6530ae0b438e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.258322] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1011.258322] env[68569]: value = "task-3167483" [ 1011.258322] env[68569]: _type = "Task" [ 1011.258322] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.269106] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167483, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.315023] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4eb327-112b-4168-b967-2e5935f60f9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.321848] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb090b2d-55a1-4f75-9074-7a513dad07ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.360949] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b042124-3d07-4f3c-8b9a-e965ce0aa0e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.370911] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da331ad-6c20-44d9-8dde-8696e9120a34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.387850] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.421637] env[68569]: DEBUG oslo_concurrency.lockutils [req-231a218d-374b-4865-b8a8-d1845b23f922 req-a248dd66-ed93-4e13-89ca-9f0dab1665c2 service nova] Releasing lock "refresh_cache-705bbc03-30ac-4d5b-a3f0-6505171a69fc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.612895] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c11a08-2249-30f6-0b15-9e34c16e8bc9, 'name': SearchDatastore_Task, 'duration_secs': 0.027862} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.613676] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7f6d337-fa71-4912-a561-85787bb6b623 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.620829] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1011.620829] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52498e27-5ecc-01b9-3fe2-ee485343288f" [ 1011.620829] env[68569]: _type = "Task" [ 1011.620829] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.629645] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52498e27-5ecc-01b9-3fe2-ee485343288f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.636772] env[68569]: DEBUG nova.network.neutron [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.768548] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167483, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.891625] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.934334] env[68569]: DEBUG nova.network.neutron [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Updating instance_info_cache with network_info: [{"id": "d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a", "address": "fa:16:3e:48:7a:00", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8c3cd23-66", "ovs_interfaceid": "d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.023593] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "ae92919c-f2eb-4231-afea-b23269e09a0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.023963] env[68569]: DEBUG oslo_concurrency.lockutils 
[None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.024138] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "ae92919c-f2eb-4231-afea-b23269e09a0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.024330] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.024500] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.026622] env[68569]: INFO nova.compute.manager [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Terminating instance [ 1012.139133] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52498e27-5ecc-01b9-3fe2-ee485343288f, 'name': SearchDatastore_Task, 'duration_secs': 0.010562} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.139133] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.139133] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 705bbc03-30ac-4d5b-a3f0-6505171a69fc/705bbc03-30ac-4d5b-a3f0-6505171a69fc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.139133] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e73791b1-f505-4be8-9072-f7f9917ed905 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.141760] env[68569]: DEBUG nova.compute.manager [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Received event network-vif-plugged-d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1012.141973] env[68569]: DEBUG oslo_concurrency.lockutils [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] Acquiring lock "1670f03a-94e2-4005-be7e-41aad61a8925-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.142207] env[68569]: DEBUG oslo_concurrency.lockutils [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] Lock "1670f03a-94e2-4005-be7e-41aad61a8925-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.142383] env[68569]: DEBUG oslo_concurrency.lockutils [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] Lock "1670f03a-94e2-4005-be7e-41aad61a8925-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.145065] env[68569]: DEBUG nova.compute.manager [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] No waiting events found dispatching network-vif-plugged-d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1012.145350] env[68569]: WARNING nova.compute.manager [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Received unexpected event 
network-vif-plugged-d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a for instance with vm_state building and task_state spawning. [ 1012.145596] env[68569]: DEBUG nova.compute.manager [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Received event network-changed-d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1012.146223] env[68569]: DEBUG nova.compute.manager [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Refreshing instance network info cache due to event network-changed-d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1012.146223] env[68569]: DEBUG oslo_concurrency.lockutils [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] Acquiring lock "refresh_cache-1670f03a-94e2-4005-be7e-41aad61a8925" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.151180] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1012.151180] env[68569]: value = "task-3167484" [ 1012.151180] env[68569]: _type = "Task" [ 1012.151180] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.160379] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.274955] env[68569]: DEBUG oslo_vmware.api [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167483, 'name': RemoveSnapshot_Task, 'duration_secs': 0.863405} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.275421] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1012.275779] env[68569]: INFO nova.compute.manager [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Took 15.41 seconds to snapshot the instance on the hypervisor. 
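[editorial note] The snapshot sequence just above (Destroy_Task, then RemoveSnapshot_Task) and the spawn sequences elsewhere in this log (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task) all show the same wait_for_task/_poll_task pattern: log "Waiting for the task ... to complete", poll periodically and log "progress is N%", then report completion together with a duration_secs. The sketch below is a minimal illustration of that polling loop only, under stated assumptions; it is not Nova's or oslo.vmware's code. fetch_task_info is a hypothetical callback standing in for the vCenter Task property queries, and in the real driver the poll is driven by an oslo.service looping call (visible above as "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") rather than a plain sleep loop.

# Minimal sketch (assumption-labelled, not Nova/oslo.vmware code) of the
# task-polling pattern seen in the log entries above.
import time


class TaskFailed(Exception):
    """Raised when the remote task reports an error state."""


def wait_for_task(fetch_task_info, poll_interval=0.5, log=print):
    """Poll a long-running task until it succeeds or fails.

    fetch_task_info: hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 33},
        {'state': 'success'} or {'state': 'error', 'error': '...'}.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        state = info.get('state')
        if state == 'success':
            # Mirrors "completed successfully" plus duration_secs in the log.
            log("completed successfully in %.3fs" % (time.monotonic() - start))
            return info
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Mirrors the periodic "progress is N%" entries.
        log("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)


# Example (hypothetical): poll a fake task that finishes on the third check.
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 33},
               {'state': 'success'}])
wait_for_task(lambda: next(states), poll_interval=0.01)

[end editorial note]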
[ 1012.396026] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1012.396337] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.475s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.396596] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.429s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.396797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.399370] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.645s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.437070] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-1670f03a-94e2-4005-be7e-41aad61a8925" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.437444] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Instance network_info: |[{"id": "d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a", "address": "fa:16:3e:48:7a:00", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapd8c3cd23-66", "ovs_interfaceid": "d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1012.437759] env[68569]: DEBUG oslo_concurrency.lockutils [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] Acquired lock "refresh_cache-1670f03a-94e2-4005-be7e-41aad61a8925" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.437930] env[68569]: DEBUG nova.network.neutron [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Refreshing network info cache for port d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.439577] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:7a:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8c3cd23-66ee-4f2c-a7df-7c5321a67e8a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.450642] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1012.451466] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1012.451699] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e32948b-0d73-45ca-b66e-1d9f7ca091f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.481228] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.481228] env[68569]: value = "task-3167485" [ 1012.481228] env[68569]: _type = "Task" [ 1012.481228] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.492182] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167485, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.530802] env[68569]: DEBUG nova.compute.manager [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1012.531230] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.532514] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c5d80b-f3dc-4758-a3b4-8705eb830da7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.542386] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.542844] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8aa7d99b-4482-4b05-b3e2-21570e69fdc9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.619779] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.620042] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.620238] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleting the datastore file [datastore1] ae92919c-f2eb-4231-afea-b23269e09a0a {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.620593] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a8ee8a4-c3d5-4224-ab1f-379aaef9b8d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.629520] env[68569]: DEBUG oslo_vmware.api [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1012.629520] env[68569]: value = "task-3167487" [ 1012.629520] env[68569]: _type = "Task" [ 1012.629520] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.637985] env[68569]: DEBUG oslo_vmware.api [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.663637] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476248} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.664156] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 705bbc03-30ac-4d5b-a3f0-6505171a69fc/705bbc03-30ac-4d5b-a3f0-6505171a69fc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1012.664508] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1012.665190] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3daf7cb-b6df-4bd1-9b24-8f12cd4c2e91 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.674593] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1012.674593] env[68569]: value = "task-3167488" [ 1012.674593] env[68569]: _type = "Task" [ 1012.674593] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.682962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "671f7e6d-703e-48a9-8509-2a8924afe911" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.683291] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "671f7e6d-703e-48a9-8509-2a8924afe911" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.687925] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167488, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.781110] env[68569]: DEBUG nova.compute.manager [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Instance disappeared during snapshot {{(pid=68569) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1012.802437] env[68569]: DEBUG nova.compute.manager [None req-f1e57312-d7fe-4904-8318-87d49e9b94b1 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image not found during clean up e0d58c70-e9b2-4972-b9da-c050b529130a {{(pid=68569) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1012.903635] env[68569]: INFO nova.compute.claims [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.914112] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69073084-a60f-4cfd-a6d9-a165f44e64c7 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.656s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.914992] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 10.705s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.915190] env[68569]: INFO nova.compute.manager [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Unshelving [ 1012.994738] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167485, 'name': CreateVM_Task, 'duration_secs': 0.35601} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.994975] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1012.996171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.996365] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.996679] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1012.996937] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831a778a-77a4-40e4-a891-cf37f964c604 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.001830] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1013.001830] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e94e11-7055-7d6d-e6e8-5fed1cf0453c" [ 1013.001830] env[68569]: _type = "Task" [ 1013.001830] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.011664] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e94e11-7055-7d6d-e6e8-5fed1cf0453c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.141783] env[68569]: DEBUG oslo_vmware.api [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154566} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.142049] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.142225] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.142391] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.142550] env[68569]: INFO nova.compute.manager [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1013.142776] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1013.142988] env[68569]: DEBUG nova.compute.manager [-] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1013.143782] env[68569]: DEBUG nova.network.neutron [-] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.184749] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075923} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.185029] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.185818] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebb1bc9-bb6e-4ccd-83fa-6859f7bbb4bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.190321] env[68569]: DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.216027] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 705bbc03-30ac-4d5b-a3f0-6505171a69fc/705bbc03-30ac-4d5b-a3f0-6505171a69fc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.216991] env[68569]: DEBUG nova.network.neutron [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Updated VIF entry in instance network info cache for port d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.217353] env[68569]: DEBUG nova.network.neutron [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Updating instance_info_cache with network_info: [{"id": "d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a", "address": "fa:16:3e:48:7a:00", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8c3cd23-66", "ovs_interfaceid": "d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.218507] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f31d8ad5-f3ef-4005-a70e-8558624e2ffc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.235219] env[68569]: DEBUG oslo_concurrency.lockutils [req-d4d81b57-1fd8-4b3c-834a-a45468c317f6 req-acd73d3d-9b1e-42b7-bb64-f77a3300c4e3 service nova] Releasing lock "refresh_cache-1670f03a-94e2-4005-be7e-41aad61a8925" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.242034] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1013.242034] env[68569]: value = "task-3167489" [ 1013.242034] env[68569]: _type = "Task" [ 1013.242034] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.253223] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167489, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.260950] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "4c122cff-f64c-4e4f-9454-034c44ff246b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.261216] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.416771] env[68569]: INFO nova.compute.resource_tracker [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updating resource usage from migration 08357d8f-bef1-4555-884e-6cf2c811f256 [ 1013.451013] env[68569]: DEBUG nova.compute.manager [req-35671623-f2ce-4f3b-8109-ed426169bd48 req-b62eacb1-0bda-4467-8b5c-56aa51c9134e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Received event network-vif-deleted-5de82b0c-a595-4eb1-89af-5ab6517061ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1013.451013] env[68569]: INFO nova.compute.manager [req-35671623-f2ce-4f3b-8109-ed426169bd48 req-b62eacb1-0bda-4467-8b5c-56aa51c9134e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Neutron deleted interface 5de82b0c-a595-4eb1-89af-5ab6517061ad; detaching it from the instance and deleting it from the info cache [ 1013.451013] env[68569]: DEBUG nova.network.neutron [req-35671623-f2ce-4f3b-8109-ed426169bd48 req-b62eacb1-0bda-4467-8b5c-56aa51c9134e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.517655] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e94e11-7055-7d6d-e6e8-5fed1cf0453c, 'name': SearchDatastore_Task, 'duration_secs': 0.012026} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.521178] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.521178] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1013.521273] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.521436] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.521680] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1013.522032] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8eb4fc47-18a9-4f7a-a3af-fa33a07cce0b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.530467] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1013.530734] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1013.531573] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-161ec53f-9988-48de-9b87-da5df7a9e8de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.539438] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1013.539438] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b62fca-6ac6-bf80-f5af-a3169638585c" [ 1013.539438] env[68569]: _type = "Task" [ 1013.539438] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.549488] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b62fca-6ac6-bf80-f5af-a3169638585c, 'name': SearchDatastore_Task, 'duration_secs': 0.00968} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.550246] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66971c84-d718-4305-ab85-c99bd4e249e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.557317] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1013.557317] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5216f5f7-8411-4853-db57-35db99cf958a" [ 1013.557317] env[68569]: _type = "Task" [ 1013.557317] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.564797] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5216f5f7-8411-4853-db57-35db99cf958a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.711374] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.715066] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae05957-bbef-4904-a1fc-097ed2a7bc36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.725035] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b41da30-d43b-43f8-a58b-b52037eda3be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.757506] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1cc10a-ff22-40b9-afbf-9722b1a95617 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.766260] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.769014] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167489, 'name': ReconfigVM_Task, 'duration_secs': 0.302756} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.770436] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d16def-e950-4d2e-bd1f-aa6831adc5ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.773939] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 705bbc03-30ac-4d5b-a3f0-6505171a69fc/705bbc03-30ac-4d5b-a3f0-6505171a69fc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1013.774514] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c779d4f9-d3d9-4184-b924-744647601ede {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.786054] env[68569]: DEBUG nova.compute.provider_tree [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.788281] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1013.788281] env[68569]: value = "task-3167490" [ 1013.788281] env[68569]: _type = "Task" [ 1013.788281] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.798323] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167490, 'name': Rename_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.921951] env[68569]: DEBUG nova.network.neutron [-] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.944150] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.956982] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-128d7932-fa70-46cb-af66-254bd10d8f7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.966200] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ad9ead-c4b8-4028-aa10-0dc39b323b92 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.997820] env[68569]: DEBUG nova.compute.manager [req-35671623-f2ce-4f3b-8109-ed426169bd48 req-b62eacb1-0bda-4467-8b5c-56aa51c9134e service nova] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Detach interface failed, port_id=5de82b0c-a595-4eb1-89af-5ab6517061ad, reason: Instance ae92919c-f2eb-4231-afea-b23269e09a0a could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1014.067035] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5216f5f7-8411-4853-db57-35db99cf958a, 'name': SearchDatastore_Task, 'duration_secs': 0.009749} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.067328] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.067568] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 1670f03a-94e2-4005-be7e-41aad61a8925/1670f03a-94e2-4005-be7e-41aad61a8925.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1014.067814] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c55d173-e0b4-4ee7-adef-e3e5d695639c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.074038] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1014.074038] env[68569]: value = "task-3167491" [ 1014.074038] env[68569]: _type = "Task" [ 1014.074038] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.081524] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.285075] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.290245] env[68569]: DEBUG nova.scheduler.client.report [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.302654] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167490, 'name': Rename_Task, 'duration_secs': 0.160015} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.302996] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1014.303303] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92fa779a-cfcd-4ba1-b3a6-da722621a4ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.311054] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1014.311054] env[68569]: value = "task-3167492" [ 1014.311054] env[68569]: _type = "Task" [ 1014.311054] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.320287] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.424494] env[68569]: INFO nova.compute.manager [-] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Took 1.28 seconds to deallocate network for instance. [ 1014.583864] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476904} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.584147] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 1670f03a-94e2-4005-be7e-41aad61a8925/1670f03a-94e2-4005-be7e-41aad61a8925.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1014.584364] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1014.584612] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-430c16ec-ba4f-46dc-b511-3d4c8a827eec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.591600] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1014.591600] env[68569]: value = "task-3167493" [ 1014.591600] env[68569]: _type = "Task" [ 1014.591600] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.599405] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167493, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.797798] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.398s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.798116] env[68569]: INFO nova.compute.manager [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Migrating [ 1014.804525] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.642s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.806087] env[68569]: INFO nova.compute.claims [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.826021] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167492, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.932023] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.103356] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069111} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.103657] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1015.104450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bf4545-8ccb-4c6b-81bf-2ed87e8ff7f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.127230] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 1670f03a-94e2-4005-be7e-41aad61a8925/1670f03a-94e2-4005-be7e-41aad61a8925.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.127517] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-585c7701-5a52-4b2f-832d-f67e7c3d4c2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.146981] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1015.146981] env[68569]: value = "task-3167494" [ 1015.146981] env[68569]: _type = "Task" [ 1015.146981] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.154770] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167494, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.319451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.319762] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.319981] env[68569]: DEBUG nova.network.neutron [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.331379] env[68569]: DEBUG oslo_vmware.api [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167492, 'name': PowerOnVM_Task, 'duration_secs': 0.660541} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.332595] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1015.332836] env[68569]: INFO nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Took 7.78 seconds to spawn the instance on the hypervisor. [ 1015.333067] env[68569]: DEBUG nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.333909] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38edf18a-c553-4b1f-9c11-2de4cbaa5507 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.657154] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167494, 'name': ReconfigVM_Task, 'duration_secs': 0.292447} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.657455] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 1670f03a-94e2-4005-be7e-41aad61a8925/1670f03a-94e2-4005-be7e-41aad61a8925.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.658102] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d08f77ea-18cb-4850-9afe-fdc0a8d8ef97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.665030] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1015.665030] env[68569]: value = "task-3167495" [ 1015.665030] env[68569]: _type = "Task" [ 1015.665030] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.672548] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167495, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.854360] env[68569]: INFO nova.compute.manager [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Took 24.01 seconds to build instance. 
[ 1016.115706] env[68569]: DEBUG nova.network.neutron [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updating instance_info_cache with network_info: [{"id": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "address": "fa:16:3e:90:51:17", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb81daf-a1", "ovs_interfaceid": "ceb81daf-a1b8-41e6-a68d-583d6349ab13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.120018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa54ff4-f83a-404c-8377-a32b45681bad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.127524] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7229cadc-bfc4-490a-803d-3805c01dadb2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.163020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8c7c44-6796-4402-9a2d-b75941aac87e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.171749] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e273a99-4726-4004-8717-55c4e7eae298 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.180988] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167495, 'name': Rename_Task, 'duration_secs': 0.148089} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.189055] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.189162] env[68569]: DEBUG nova.compute.provider_tree [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.190849] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecc5cc90-6fe1-43b3-8b51-2154308fa32b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.198926] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1016.198926] env[68569]: value = "task-3167496" [ 1016.198926] env[68569]: _type = "Task" [ 1016.198926] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.207079] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167496, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.356978] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e5fdc3a-ad0b-4897-8ed2-cc46e9191d1a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.524s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.413535] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.413858] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.414101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.414293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.414465] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.416892] env[68569]: INFO nova.compute.manager [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Terminating instance [ 1016.618468] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-281d7077-391a-4cce-9d31-af41568a2b7c" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.692611] env[68569]: DEBUG nova.scheduler.client.report [None 
req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.712536] env[68569]: DEBUG oslo_vmware.api [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167496, 'name': PowerOnVM_Task, 'duration_secs': 0.448843} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.713548] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.713762] env[68569]: INFO nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Took 6.76 seconds to spawn the instance on the hypervisor. [ 1016.713935] env[68569]: DEBUG nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.714860] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51eeb6aa-9602-4209-8340-11efd23a494a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.921493] env[68569]: DEBUG nova.compute.manager [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1016.921784] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.922468] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b322f620-d39f-441c-852d-f3788f36588b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.929928] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.930176] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4f2622c-38fd-4f6d-aab4-539b2e7c4cda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.936026] env[68569]: DEBUG oslo_vmware.api [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1016.936026] env[68569]: value = "task-3167497" [ 1016.936026] env[68569]: _type = "Task" [ 1016.936026] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.944526] env[68569]: DEBUG oslo_vmware.api [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.197597] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.198365] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1017.200954] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.179s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.202387] env[68569]: INFO nova.compute.claims [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.234778] env[68569]: INFO nova.compute.manager [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Took 23.04 seconds to build instance. [ 1017.446054] env[68569]: DEBUG oslo_vmware.api [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167497, 'name': PowerOffVM_Task, 'duration_secs': 0.220137} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.446241] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.446396] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.446653] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbadbe8e-ea0c-4a23-baa9-5e60b9e324fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.506803] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.507053] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.507243] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore1] 705bbc03-30ac-4d5b-a3f0-6505171a69fc {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.507538] env[68569]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd16871e-de72-4b8d-b807-865801a46044 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.513969] env[68569]: DEBUG oslo_vmware.api [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1017.513969] env[68569]: value = "task-3167499" [ 1017.513969] env[68569]: _type = "Task" [ 1017.513969] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.522068] env[68569]: DEBUG oslo_vmware.api [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167499, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.707071] env[68569]: DEBUG nova.compute.utils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1017.709855] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1017.710071] env[68569]: DEBUG nova.network.neutron [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1017.736431] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48d5c545-d826-4826-acc7-d746a5296f0f tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "1670f03a-94e2-4005-be7e-41aad61a8925" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.551s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.747860] env[68569]: DEBUG nova.policy [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54ebbdfe9bfb4854a40b07d60c7a9efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f361997374e943cfa7a8e4e4884d6c65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1017.849495] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565d633d-2e0c-458c-b0f5-64c3c3999d0a {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.856271] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Suspending the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1017.856511] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b68afabe-46c5-4156-8b6a-fdfd342b44f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.862959] env[68569]: DEBUG oslo_vmware.api [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1017.862959] env[68569]: value = "task-3167500" [ 1017.862959] env[68569]: _type = "Task" [ 1017.862959] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.870783] env[68569]: DEBUG oslo_vmware.api [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167500, 'name': SuspendVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.023915] env[68569]: DEBUG oslo_vmware.api [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33098} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.024329] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.024519] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.024696] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.024871] env[68569]: INFO nova.compute.manager [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1018.025126] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1018.025495] env[68569]: DEBUG nova.compute.manager [-] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1018.025495] env[68569]: DEBUG nova.network.neutron [-] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1018.065799] env[68569]: DEBUG nova.network.neutron [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Successfully created port: f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.215824] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1018.304547] env[68569]: DEBUG nova.compute.manager [req-75314a4e-0eb8-4dc6-b621-9f661af37468 req-3933b302-319c-4379-a91c-a20cfd2ea7e9 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Received event network-vif-deleted-9c464830-8fd8-4131-b5a0-3b163477168a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1018.304931] env[68569]: INFO nova.compute.manager [req-75314a4e-0eb8-4dc6-b621-9f661af37468 req-3933b302-319c-4379-a91c-a20cfd2ea7e9 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Neutron deleted interface 9c464830-8fd8-4131-b5a0-3b163477168a; detaching it from the instance and deleting it from the info cache [ 1018.305052] env[68569]: DEBUG nova.network.neutron [req-75314a4e-0eb8-4dc6-b621-9f661af37468 req-3933b302-319c-4379-a91c-a20cfd2ea7e9 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.375737] env[68569]: DEBUG oslo_vmware.api [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167500, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.532561] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c5250a-c2ba-4568-a523-69fb2ba66901 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.540218] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529d19bd-4ef2-4345-841c-083630bba943 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.573371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d17905c-06b9-42c8-a41e-d3036dd0dd73 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.581818] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550e334f-4525-4b57-bd10-8c06477ef325 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.595384] env[68569]: DEBUG nova.compute.provider_tree [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.788523] env[68569]: DEBUG nova.network.neutron [-] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.812852] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-242cf52e-210f-4acf-ae46-614bec2e0416 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.822075] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e847916-3240-43b5-b425-d2a2c3e6819e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.852876] env[68569]: DEBUG nova.compute.manager [req-75314a4e-0eb8-4dc6-b621-9f661af37468 req-3933b302-319c-4379-a91c-a20cfd2ea7e9 service nova] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Detach interface failed, port_id=9c464830-8fd8-4131-b5a0-3b163477168a, reason: Instance 705bbc03-30ac-4d5b-a3f0-6505171a69fc could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1018.879122] env[68569]: DEBUG oslo_vmware.api [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167500, 'name': SuspendVM_Task, 'duration_secs': 0.71145} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.881794] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Suspended the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1018.881794] env[68569]: DEBUG nova.compute.manager [None req-f378fa97-cf98-4122-89b0-7e9d28adc60d tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.881794] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4752c520-2bd9-4a6b-acae-aa2c86cadf92 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.098988] env[68569]: DEBUG nova.scheduler.client.report [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.137920] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.138206] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.138417] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.138599] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.138766] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.140754] env[68569]: ERROR nova.compute.manager [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Traceback (most recent call last): [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] yield [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] disk_info = self.driver.migrate_disk_and_power_off( [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1019.140754] env[68569]: ERROR nova.compute.manager [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] [ 1019.144302] env[68569]: INFO nova.compute.manager [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Terminating instance [ 1019.226083] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1019.251705] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.251981] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.252146] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.252333] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.252478] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.252623] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.252828] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.252986] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.253163] env[68569]: DEBUG 
nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.253320] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.253487] env[68569]: DEBUG nova.virt.hardware [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.254355] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94a0236-355d-453d-b165-65ff01e02a5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.262265] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2feb2aa-e213-4559-9777-4e54e8f999e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.290455] env[68569]: INFO nova.compute.manager [-] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Took 1.26 seconds to deallocate network for instance. [ 1019.493358] env[68569]: DEBUG nova.compute.manager [req-bb95abf1-d5e0-4d04-9104-37493e9ea959 req-fecb0292-9a40-4af9-8463-dda8d343e3e5 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Received event network-vif-plugged-f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1019.493500] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb95abf1-d5e0-4d04-9104-37493e9ea959 req-fecb0292-9a40-4af9-8463-dda8d343e3e5 service nova] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.494622] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb95abf1-d5e0-4d04-9104-37493e9ea959 req-fecb0292-9a40-4af9-8463-dda8d343e3e5 service nova] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.494622] env[68569]: DEBUG oslo_concurrency.lockutils [req-bb95abf1-d5e0-4d04-9104-37493e9ea959 req-fecb0292-9a40-4af9-8463-dda8d343e3e5 service nova] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.494622] env[68569]: DEBUG nova.compute.manager [req-bb95abf1-d5e0-4d04-9104-37493e9ea959 req-fecb0292-9a40-4af9-8463-dda8d343e3e5 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] No waiting events found dispatching 
network-vif-plugged-f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.494622] env[68569]: WARNING nova.compute.manager [req-bb95abf1-d5e0-4d04-9104-37493e9ea959 req-fecb0292-9a40-4af9-8463-dda8d343e3e5 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Received unexpected event network-vif-plugged-f0211609-4c97-40c7-ba53-6f3802350533 for instance with vm_state building and task_state spawning. [ 1019.580668] env[68569]: DEBUG nova.network.neutron [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Successfully updated port: f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1019.606830] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.607380] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1019.609993] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.301s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.612774] env[68569]: INFO nova.compute.claims [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.652146] env[68569]: DEBUG nova.compute.manager [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1019.652379] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1019.654564] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f945575b-1b34-4cfe-bf3b-18d1186d38cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.663029] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.663029] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc2d4e66-25ca-4776-9e3c-035983eef779 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.665349] env[68569]: INFO nova.compute.manager [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration 08357d8f-bef1-4555-884e-6cf2c811f256 for instance [ 1019.672507] env[68569]: DEBUG oslo_vmware.api [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1019.672507] env[68569]: value = "task-3167501" [ 1019.672507] env[68569]: _type = "Task" [ 1019.672507] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.682696] env[68569]: DEBUG oslo_vmware.api [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167501, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.687639] env[68569]: DEBUG nova.scheduler.client.report [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 137}}, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'user_id': 'afebab35cda9438781e2b466ce586405', 'consumer_generation': 1} on consumer 281d7077-391a-4cce-9d31-af41568a2b7c {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1019.796798] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.083645] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.083856] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.084079] env[68569]: DEBUG nova.network.neutron [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.120312] env[68569]: DEBUG nova.compute.utils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1020.121897] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1020.121897] env[68569]: DEBUG nova.network.neutron [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1020.159021] env[68569]: DEBUG nova.policy [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73b1c309d1494888945f033a8c5140a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa0ab47201c64b0d87480d4ff90014f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1020.182836] env[68569]: DEBUG oslo_vmware.api [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167501, 'name': PowerOffVM_Task, 'duration_secs': 0.17112} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.183194] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.183298] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.183547] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbd8532d-554c-4a90-9293-8309ddb07deb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.246021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.246281] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.246405] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] 
Deleting the datastore file [datastore1] 1689f1f3-53f2-4c02-a969-e4dae21f14b7 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.247000] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6229768c-85fd-4ce0-8034-191ad5758d4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.254355] env[68569]: DEBUG oslo_vmware.api [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1020.254355] env[68569]: value = "task-3167503" [ 1020.254355] env[68569]: _type = "Task" [ 1020.254355] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.262573] env[68569]: DEBUG oslo_vmware.api [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.291197] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "1670f03a-94e2-4005-be7e-41aad61a8925" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.291621] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "1670f03a-94e2-4005-be7e-41aad61a8925" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.291726] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "1670f03a-94e2-4005-be7e-41aad61a8925-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.291918] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "1670f03a-94e2-4005-be7e-41aad61a8925-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.292251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "1670f03a-94e2-4005-be7e-41aad61a8925-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.294744] env[68569]: INFO nova.compute.manager [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Terminating instance [ 1020.492233] env[68569]: DEBUG nova.network.neutron [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Successfully created port: 82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1020.628761] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1020.635865] env[68569]: DEBUG nova.network.neutron [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.664718] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "281d7077-391a-4cce-9d31-af41568a2b7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.665077] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "281d7077-391a-4cce-9d31-af41568a2b7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.665373] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "281d7077-391a-4cce-9d31-af41568a2b7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.665898] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "281d7077-391a-4cce-9d31-af41568a2b7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.666204] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 
tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "281d7077-391a-4cce-9d31-af41568a2b7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.669077] env[68569]: INFO nova.compute.manager [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Terminating instance [ 1020.773376] env[68569]: DEBUG oslo_vmware.api [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141093} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.776798] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.776896] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.777296] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.777582] env[68569]: INFO nova.compute.manager [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1020.777951] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1020.778660] env[68569]: DEBUG nova.compute.manager [-] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1020.778780] env[68569]: DEBUG nova.network.neutron [-] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1020.798685] env[68569]: DEBUG nova.compute.manager [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.798685] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.799916] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ab28d8-59da-4844-91c6-e355b71d164e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.812043] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.812264] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edf85200-4b27-4e17-a41d-bbe4356d4553 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.887226] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.887636] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.887993] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleting the datastore file [datastore1] 1670f03a-94e2-4005-be7e-41aad61a8925 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.888369] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdb76cf1-178e-485e-83be-49dbe04545a6 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.896504] env[68569]: DEBUG oslo_vmware.api [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1020.896504] env[68569]: value = "task-3167505" [ 1020.896504] env[68569]: _type = "Task" [ 1020.896504] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.907093] env[68569]: DEBUG oslo_vmware.api [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167505, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.917712] env[68569]: DEBUG nova.network.neutron [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating instance_info_cache with network_info: [{"id": "f0211609-4c97-40c7-ba53-6f3802350533", "address": "fa:16:3e:31:d0:5c", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0211609-4c", "ovs_interfaceid": "f0211609-4c97-40c7-ba53-6f3802350533", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.940354] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e245477e-6563-42c8-9e94-9a06a60e4125 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.947629] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4068bd86-8cf0-4921-8f9a-e49777456ba7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.982085] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f1fc81-95a6-447b-bf19-8cab5e5b8e36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.993119] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e004632-74a8-43cf-9811-46ebd9aaae41 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.005309] env[68569]: DEBUG nova.compute.provider_tree [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.129471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "16112ff1-bda8-4a20-b69c-b847ade376b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.129771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.181269] env[68569]: DEBUG nova.compute.manager [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1021.181531] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.182540] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860baa9c-124a-4aeb-bac0-0d5c6ae1e91b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.192160] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.192538] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb3ef409-3676-42f8-b0ec-13427d932cf4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.199988] env[68569]: DEBUG oslo_vmware.api [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1021.199988] env[68569]: value = "task-3167506" [ 1021.199988] env[68569]: _type = "Task" [ 1021.199988] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.207741] env[68569]: DEBUG oslo_vmware.api [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.261166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.301064] env[68569]: DEBUG nova.compute.manager [req-42c14d71-a9d2-4a44-88b1-0dd731eb8bc0 req-ae9fdf39-855a-4648-bfb5-0db6f74048bf service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Received event network-vif-deleted-e0dc9415-fd47-40f4-aa7d-b89db0dccfae {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1021.301316] env[68569]: INFO nova.compute.manager [req-42c14d71-a9d2-4a44-88b1-0dd731eb8bc0 req-ae9fdf39-855a-4648-bfb5-0db6f74048bf service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Neutron deleted interface e0dc9415-fd47-40f4-aa7d-b89db0dccfae; detaching it from the instance and deleting it from the info cache [ 1021.301494] env[68569]: DEBUG nova.network.neutron [req-42c14d71-a9d2-4a44-88b1-0dd731eb8bc0 req-ae9fdf39-855a-4648-bfb5-0db6f74048bf service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.410249] env[68569]: DEBUG oslo_vmware.api [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404059} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.410639] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.410928] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.411239] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.411527] env[68569]: INFO nova.compute.manager [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1021.413022] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.413022] env[68569]: DEBUG nova.compute.manager [-] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.413022] env[68569]: DEBUG nova.network.neutron [-] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.420068] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.420490] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance network_info: |[{"id": "f0211609-4c97-40c7-ba53-6f3802350533", "address": "fa:16:3e:31:d0:5c", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0211609-4c", "ovs_interfaceid": "f0211609-4c97-40c7-ba53-6f3802350533", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1021.421069] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:d0:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0211609-4c97-40c7-ba53-6f3802350533', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.429748] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.430155] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.431674] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-642c0d7c-ba8a-4d0e-92c3-be7946b56503 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.465249] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.465249] env[68569]: value = "task-3167507" [ 1021.465249] env[68569]: _type = "Task" [ 1021.465249] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.474671] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167507, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.508912] env[68569]: DEBUG nova.scheduler.client.report [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.556168] env[68569]: DEBUG nova.compute.manager [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Received event network-changed-f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1021.556366] env[68569]: DEBUG nova.compute.manager [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Refreshing instance network info cache due to event network-changed-f0211609-4c97-40c7-ba53-6f3802350533. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1021.556686] env[68569]: DEBUG oslo_concurrency.lockutils [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] Acquiring lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.556883] env[68569]: DEBUG oslo_concurrency.lockutils [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] Acquired lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.557114] env[68569]: DEBUG nova.network.neutron [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Refreshing network info cache for port f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1021.632429] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1021.646227] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1021.675423] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.675720] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.675991] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.676193] 
env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.676392] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.676566] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.676812] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.677046] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.677268] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.677454] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.677883] env[68569]: DEBUG nova.virt.hardware [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.679368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86c8507-c709-48b6-ae13-eee1d064bde9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.688365] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b1c448-e3a9-423c-987a-a73bb1dedb69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.714131] env[68569]: DEBUG oslo_vmware.api [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167506, 'name': PowerOffVM_Task, 'duration_secs': 0.2315} completed 
successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.714462] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.715014] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.715014] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8de72f5e-a764-4306-a56a-f9e8819a905b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.782965] env[68569]: DEBUG nova.network.neutron [-] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.804061] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25f4e6f5-b677-4694-9cf9-964a6ba593c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.814424] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f007f54-595e-4a74-9760-7044b0517396 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.846883] env[68569]: DEBUG nova.compute.manager [req-42c14d71-a9d2-4a44-88b1-0dd731eb8bc0 req-ae9fdf39-855a-4648-bfb5-0db6f74048bf service nova] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Detach interface failed, port_id=e0dc9415-fd47-40f4-aa7d-b89db0dccfae, reason: Instance 1689f1f3-53f2-4c02-a969-e4dae21f14b7 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1021.863105] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.863428] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.863593] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore1] 281d7077-391a-4cce-9d31-af41568a2b7c {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.863866] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e57f01b-e25e-485d-8599-a32c43c06389 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.873142] env[68569]: DEBUG oslo_vmware.api [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1021.873142] env[68569]: value = "task-3167509" [ 1021.873142] env[68569]: _type = "Task" [ 1021.873142] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.878438] env[68569]: DEBUG oslo_vmware.api [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.977660] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167507, 'name': CreateVM_Task, 'duration_secs': 0.393244} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.977889] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.978463] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.978653] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.978974] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1021.979251] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef65bb65-e882-40cb-b00a-aa2616f88257 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.983786] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1021.983786] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f1cd06-e79a-ffaf-7d34-b35b2e012946" [ 1021.983786] env[68569]: _type = "Task" [ 1021.983786] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.994703] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f1cd06-e79a-ffaf-7d34-b35b2e012946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.013771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.014434] env[68569]: DEBUG nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.017122] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.358s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.018616] env[68569]: INFO nova.compute.claims [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.096631] env[68569]: DEBUG nova.network.neutron [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Successfully updated port: 82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1022.153312] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.209391] env[68569]: DEBUG nova.network.neutron [-] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.263854] env[68569]: DEBUG nova.network.neutron [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updated VIF entry in instance network info cache for port f0211609-4c97-40c7-ba53-6f3802350533. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1022.264356] env[68569]: DEBUG nova.network.neutron [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating instance_info_cache with network_info: [{"id": "f0211609-4c97-40c7-ba53-6f3802350533", "address": "fa:16:3e:31:d0:5c", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0211609-4c", "ovs_interfaceid": "f0211609-4c97-40c7-ba53-6f3802350533", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.284956] env[68569]: INFO nova.compute.manager [-] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Took 1.51 seconds to deallocate network for instance. [ 1022.380454] env[68569]: DEBUG oslo_vmware.api [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12574} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.380696] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.380926] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.381127] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.381299] env[68569]: INFO nova.compute.manager [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1022.381536] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.381719] env[68569]: DEBUG nova.compute.manager [-] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1022.381815] env[68569]: DEBUG nova.network.neutron [-] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1022.494167] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f1cd06-e79a-ffaf-7d34-b35b2e012946, 'name': SearchDatastore_Task, 'duration_secs': 0.017857} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.494523] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.494756] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.494994] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.495163] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.495346] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.495615] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6580d143-1124-4392-9aca-cb67eb5e636c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.504846] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.505042] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.506214] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e807a6d-10d0-4c28-b536-5f5eefefbd0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.510819] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1022.510819] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1bf79-95f8-d83d-65d2-9ac5bbbfd3d2" [ 1022.510819] env[68569]: _type = "Task" [ 1022.510819] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.518447] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1bf79-95f8-d83d-65d2-9ac5bbbfd3d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.522771] env[68569]: DEBUG nova.compute.utils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1022.524163] env[68569]: DEBUG nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1022.599121] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.599283] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.599776] env[68569]: DEBUG nova.network.neutron [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.712354] env[68569]: INFO nova.compute.manager [-] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Took 1.30 seconds to deallocate network for instance. 
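The entries above interleave several concurrent tempest requests: each line carries a req- UUID and, where the work touches a server, an [instance: <uuid>] tag, so one instance's power-off / unregister / datastore-delete / network-deallocate sequence is scattered across many timestamps. A minimal sketch for regrouping such lines per instance, assuming only the plain-text format visible here; the helper name group_by_instance and the regexes are illustrative, not part of Nova or oslo:

import re
from collections import defaultdict

# Assumed entry shape (as seen in the lines above):
#   "[ 1021.181269] env[68569]: DEBUG nova.compute.manager [None req-... ...]
#    [instance: <uuid>] message {{(pid=...) func /path:line}}"
ENTRY_RE = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+(?P<level>[A-Z]+)\s+(?P<logger>\S+)"
)
REQ_RE = re.compile(r"req-[0-9a-f-]{36}")  # request id, e.g. req-a4b79a81-...
INSTANCE_RE = re.compile(r"\[instance: (?P<uuid>[0-9a-f-]{36})\]")

def group_by_instance(log_lines):
    """Map instance UUID -> [(timestamp, level, logger, raw line)], in log order."""
    per_instance = defaultdict(list)
    for line in log_lines:
        head = ENTRY_RE.search(line)
        inst = INSTANCE_RE.search(line)
        if not head or not inst:
            continue  # entries with no [instance: ...] tag (locks, API calls, scheduler)
        per_instance[inst.group("uuid")].append(
            (float(head.group("ts")), head.group("level"), head.group("logger"), line)
        )
    return per_instance

# Swapping INSTANCE_RE for REQ_RE groups the same lines per tempest request
# instead, which is the easier view when following one test case end to end.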
[ 1022.767528] env[68569]: DEBUG oslo_concurrency.lockutils [req-972c1a07-f5ff-4cc9-bddb-b6cd062b299e req-d346847f-103c-4a63-af8c-1f6887f0c934 service nova] Releasing lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.791237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.022244] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d1bf79-95f8-d83d-65d2-9ac5bbbfd3d2, 'name': SearchDatastore_Task, 'duration_secs': 0.008436} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.023066] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13c4e73d-bf31-4da7-9a6f-1508bd8d5f8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.027519] env[68569]: DEBUG nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.034211] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1023.034211] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52003a4d-a259-faff-e98e-e1972643b08d" [ 1023.034211] env[68569]: _type = "Task" [ 1023.034211] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.043042] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52003a4d-a259-faff-e98e-e1972643b08d, 'name': SearchDatastore_Task, 'duration_secs': 0.010295} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.043265] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.043520] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.043774] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34a87ae4-535d-4129-a566-33194812d10b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.050987] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1023.050987] env[68569]: value = "task-3167510" [ 1023.050987] env[68569]: _type = "Task" [ 1023.050987] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.062255] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.129978] env[68569]: DEBUG nova.network.neutron [-] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.144984] env[68569]: DEBUG nova.network.neutron [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1023.222019] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.305206] env[68569]: DEBUG nova.network.neutron [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Updating instance_info_cache with network_info: [{"id": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "address": "fa:16:3e:e6:be:f5", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d84faa-d4", "ovs_interfaceid": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.332442] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6928652f-c8db-4e61-aa3a-cd7455b6213b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.340817] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3190d1-3dd6-4302-bcb8-4491223b8355 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.347859] env[68569]: DEBUG nova.compute.manager [req-97af6362-cc4f-48ca-8102-77daa773e1b5 req-939f635d-f7e3-4e4b-ad2f-e71ca46b8322 service nova] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Received event network-vif-deleted-d8c3cd23-66ee-4f2c-a7df-7c5321a67e8a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1023.348062] env[68569]: DEBUG nova.compute.manager [req-97af6362-cc4f-48ca-8102-77daa773e1b5 req-939f635d-f7e3-4e4b-ad2f-e71ca46b8322 service nova] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Received event network-vif-deleted-ceb81daf-a1b8-41e6-a68d-583d6349ab13 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1023.378358] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb1d35f-b6e2-4a0f-a348-9e8c892d9678 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.387107] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f88ec4-baf7-4bff-8ab0-e955beaae285 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.401580] env[68569]: DEBUG nova.compute.provider_tree [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.560653] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486035} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.560949] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.561184] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.561440] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77f4220b-6154-46a6-8f23-b0c22c1db2f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.567527] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1023.567527] env[68569]: value = "task-3167511" [ 1023.567527] env[68569]: _type = "Task" [ 1023.567527] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.577107] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167511, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.584452] env[68569]: DEBUG nova.compute.manager [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Received event network-vif-plugged-82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1023.584674] env[68569]: DEBUG oslo_concurrency.lockutils [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] Acquiring lock "a7145443-aacb-4d9e-8e39-3741d0630849-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.584897] env[68569]: DEBUG oslo_concurrency.lockutils [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] Lock "a7145443-aacb-4d9e-8e39-3741d0630849-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.585137] env[68569]: DEBUG oslo_concurrency.lockutils [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] Lock "a7145443-aacb-4d9e-8e39-3741d0630849-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.585254] env[68569]: DEBUG nova.compute.manager [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] No waiting events found dispatching network-vif-plugged-82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1023.585440] env[68569]: WARNING nova.compute.manager [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Received unexpected event network-vif-plugged-82d84faa-d446-4ed1-b1c1-65399875d3f2 for instance with vm_state building and task_state spawning. [ 1023.585575] env[68569]: DEBUG nova.compute.manager [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Received event network-changed-82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1023.585723] env[68569]: DEBUG nova.compute.manager [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Refreshing instance network info cache due to event network-changed-82d84faa-d446-4ed1-b1c1-65399875d3f2. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1023.585883] env[68569]: DEBUG oslo_concurrency.lockutils [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] Acquiring lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.631997] env[68569]: INFO nova.compute.manager [-] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Took 1.25 seconds to deallocate network for instance. [ 1023.810285] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.810483] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance network_info: |[{"id": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "address": "fa:16:3e:e6:be:f5", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d84faa-d4", "ovs_interfaceid": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1023.810818] env[68569]: DEBUG oslo_concurrency.lockutils [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] Acquired lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.811093] env[68569]: DEBUG nova.network.neutron [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Refreshing network info cache for port 82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.812360] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:be:f5', 'network_ref': {'type': 
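The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ..." records above come from nova's per-instance event bookkeeping: a build registers the events it expects, and the external-event handler pops and signals them, warning when nothing is waiting. The following is a toy model of that bookkeeping, not nova's `InstanceEvents` implementation; the class, method names and port id used below are illustrative only.

```python
# Toy illustration of the "waiting events" bookkeeping referred to in
# the records above. Not nova's InstanceEvents class.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}          # event name -> threading.Event

    def prepare(self, name):
        # Register interest in an event before triggering the action
        # that will eventually produce it.
        with self._lock:
            return self._events.setdefault(name, threading.Event())

    def pop_and_signal(self, name):
        # Called by the external-event handler; unmatched events are
        # dropped, analogous to the WARNING record above.
        with self._lock:
            ev = self._events.pop(name, None)
        if ev is None:
            return False
        ev.set()
        return True

events = InstanceEvents()
waiter = events.prepare('network-vif-plugged-82d84faa-d446-4ed1-b1c1-65399875d3f2')
events.pop_and_signal('network-vif-plugged-82d84faa-d446-4ed1-b1c1-65399875d3f2')
assert waiter.is_set()
```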
'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82d84faa-d446-4ed1-b1c1-65399875d3f2', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.819750] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1023.822606] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.823071] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fcf170b-86d9-4a87-83d8-c13926b29929 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.843978] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.843978] env[68569]: value = "task-3167512" [ 1023.843978] env[68569]: _type = "Task" [ 1023.843978] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.851759] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167512, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.931885] env[68569]: ERROR nova.scheduler.client.report [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [req-395eba36-af01-4d50-917e-8c71470c90c9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-395eba36-af01-4d50-917e-8c71470c90c9"}]} [ 1023.950903] env[68569]: DEBUG nova.scheduler.client.report [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1023.963894] env[68569]: DEBUG nova.scheduler.client.report [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1023.964140] env[68569]: DEBUG nova.compute.provider_tree [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.974763] env[68569]: DEBUG nova.scheduler.client.report [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1023.996321] env[68569]: DEBUG nova.scheduler.client.report [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1024.040713] env[68569]: DEBUG nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Start spawning the instance on the hypervisor. 
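The `409 ... placement.concurrent_update` above is Placement rejecting an inventory PUT because the resource provider generation the client sent is stale; the report client responds by refreshing the provider's inventories (which carries the current generation) and retrying, as the following records show. A minimal sketch of such a refresh-and-retry loop against the Placement HTTP API, assuming `sess` is an authenticated keystoneauth1 session with the placement endpoint in its catalog; the retry count is arbitrary and microversion headers are omitted.

```python
# Illustrative retry loop for a Placement generation conflict. 'sess'
# is assumed to be a keystoneauth1 Session; the provider UUID and
# inventory payload mirror the shapes seen in the log.
def set_inventory(sess, rp_uuid, inventories, attempts=3):
    for _ in range(attempts):
        # Fetch the provider's current generation (and inventories).
        cur = sess.get('/resource_providers/%s/inventories' % rp_uuid,
                       endpoint_filter={'service_type': 'placement'}).json()
        body = {
            'resource_provider_generation': cur['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = sess.put('/resource_providers/%s/inventories' % rp_uuid,
                        json=body, raise_exc=False,
                        endpoint_filter={'service_type': 'placement'})
        if resp.status_code == 200:
            return resp.json()
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop to refresh and try again.
    raise RuntimeError('could not update inventory for %s' % rp_uuid)
```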
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.058838] env[68569]: DEBUG nova.network.neutron [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Updated VIF entry in instance network info cache for port 82d84faa-d446-4ed1-b1c1-65399875d3f2. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1024.059289] env[68569]: DEBUG nova.network.neutron [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Updating instance_info_cache with network_info: [{"id": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "address": "fa:16:3e:e6:be:f5", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d84faa-d4", "ovs_interfaceid": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.076123] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.076384] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.076541] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 
tempest-ServerShowV247Test-2077883231-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.076716] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.076861] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.077015] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.077252] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.077428] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.077586] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.077761] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.077951] env[68569]: DEBUG nova.virt.hardware [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.079153] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816931fd-f7e6-4442-834e-12ff36a890ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.092485] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f1a25e-9566-41bc-9f57-39b7115e9e6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.096985] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 
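The `_get_desirable_cpu_topologies` records above take the 1-vCPU m1.nano flavor through limits 0:0:0, maxima 65536:65536:65536, and end with a single possible topology 1:1:1. The sketch below only reproduces the counting step of that walk, i.e. enumerating (sockets, cores, threads) factorizations of the vCPU count under per-dimension maxima; nova's real logic also honours flavor and image preferences, which are ignored here.

```python
# Simplified illustration of the topology enumeration described in the
# records above: list (sockets, cores, threads) triples whose product
# equals the vCPU count and which respect the per-dimension maximums.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

# For the 1-vCPU flavor in the log this yields [(1, 1, 1)], matching
# the "Got 1 possible topologies" record.
print(possible_topologies(1))
```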
tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070419} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.099542] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1024.100828] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe944654-f022-42f7-95ec-ed7908b6738f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.111820] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1024.117403] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Creating folder: Project (ce3249351ad945e1a7372e9a81c50b5c). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1024.120450] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34e352a3-5ca7-43c3-a5c0-e14c8731d142 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.140249] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.144962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.146305] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9994c61d-96a7-44aa-965b-d50b7e44655c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.161864] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Created folder: Project (ce3249351ad945e1a7372e9a81c50b5c) in parent group-v633430. 
[ 1024.162082] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Creating folder: Instances. Parent ref: group-v633679. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1024.162638] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f457a01-10a4-4f66-87f1-cfacbac05947 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.170040] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1024.170040] env[68569]: value = "task-3167514" [ 1024.170040] env[68569]: _type = "Task" [ 1024.170040] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.178571] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Created folder: Instances in parent group-v633679. [ 1024.178701] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1024.182058] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1024.182058] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-589dec25-349c-4587-ab41-a52a3220a3b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.198633] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167514, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.207630] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1024.207630] env[68569]: value = "task-3167516" [ 1024.207630] env[68569]: _type = "Task" [ 1024.207630] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.215768] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167516, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.326649] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3938223-7027-4329-b739-cdc6c0605f1d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.334535] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78ca94a-7b5e-4230-b7a7-abbe85594a31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.368576] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfad0157-2388-4068-b9ca-738971396137 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.376498] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167512, 'name': CreateVM_Task, 'duration_secs': 0.371811} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.378443] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.379166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.379334] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.379644] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1024.380899] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9047bf2a-57dd-4dc3-ad76-3863596d75d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.384489] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9a4cd53-b6d8-4cb4-8d99-eaab58597a64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.396439] env[68569]: DEBUG nova.compute.provider_tree [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1024.398676] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1024.398676] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5214fa32-2e30-de5c-fa2c-10064cff7e81" [ 1024.398676] env[68569]: _type = "Task" [ 1024.398676] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.406983] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5214fa32-2e30-de5c-fa2c-10064cff7e81, 'name': SearchDatastore_Task, 'duration_secs': 0.011441} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.407842] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.408094] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.408332] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.408477] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.408656] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.409183] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c44f04a9-66ef-44bf-82b7-23da5ff395b2 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.417669] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.417850] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.418573] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac0b6ee1-baf4-4fd0-9062-098189aa42be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.423449] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1024.423449] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5296189f-5a5e-ac2f-1301-05d00d76c053" [ 1024.423449] env[68569]: _type = "Task" [ 1024.423449] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.433792] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5296189f-5a5e-ac2f-1301-05d00d76c053, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.568350] env[68569]: DEBUG oslo_concurrency.lockutils [req-10509e53-6a7d-49d0-9af1-bfa29a137437 req-afbd032c-31c7-46b2-902d-37dc52238566 service nova] Releasing lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.680695] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167514, 'name': ReconfigVM_Task, 'duration_secs': 0.366075} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.680985] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfigured VM instance instance-0000005a to attach disk [datastore2] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.681646] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d8630b3-f21e-429b-8b63-8eb2a0faec2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.687738] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1024.687738] env[68569]: value = "task-3167517" [ 1024.687738] env[68569]: _type = "Task" [ 1024.687738] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.695361] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167517, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.717040] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167516, 'name': CreateVM_Task, 'duration_secs': 0.341161} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.717040] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.717432] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.717620] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.718009] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1024.718270] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b3384f7-0972-4135-8ecb-883cced4654c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.722585] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1024.722585] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bfbe50-8cb0-b48c-cc59-a5102d431c1c" [ 1024.722585] env[68569]: _type = "Task" [ 1024.722585] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.729889] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bfbe50-8cb0-b48c-cc59-a5102d431c1c, 'name': SearchDatastore_Task} progress is 0%. 
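The "Acquiring lock", "Acquired lock" and "Acquired external semaphore" records around the `devstack-image-cache_base` entries come from `oslo_concurrency.lockutils`, which serializes workers touching the same cached image. A minimal sketch of that pattern with the public `lockutils.lock` context manager; the lock name is copied from the log, while the lock path and function are placeholders.

```python
# Minimal sketch of the oslo.concurrency locking pattern seen in these
# records. lockutils.lock() is a context manager; with external=True it
# also takes a file-based lock so cooperating processes on the host
# serialize on the same name (roughly the "external semaphore" records
# above).
from oslo_concurrency import lockutils

CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
              'cfcf6154-fe87-45d3-9aaf-2d3604c95629')

def process_cached_image():
    with lockutils.lock(CACHE_LOCK, external=True,
                        lock_path='/tmp/nova-locks'):  # placeholder path
        # Inspect or populate the image cache entry while no other
        # worker can touch the same cache item.
        pass
```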
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.929536] env[68569]: DEBUG nova.scheduler.client.report [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 139 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1024.929797] env[68569]: DEBUG nova.compute.provider_tree [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 139 to 140 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1024.930068] env[68569]: DEBUG nova.compute.provider_tree [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1024.937054] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5296189f-5a5e-ac2f-1301-05d00d76c053, 'name': SearchDatastore_Task, 'duration_secs': 0.009055} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.937678] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eddeb764-b3a0-43f4-a3bb-aa7b2eff60f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.942797] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1024.942797] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52807d02-d983-4415-fab1-935c2e444ef5" [ 1024.942797] env[68569]: _type = "Task" [ 1024.942797] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.951052] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52807d02-d983-4415-fab1-935c2e444ef5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.196944] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167517, 'name': Rename_Task, 'duration_secs': 0.136974} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.197349] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.197627] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f95b4bf-ada2-454d-a757-3d29e3075dea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.204471] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1025.204471] env[68569]: value = "task-3167518" [ 1025.204471] env[68569]: _type = "Task" [ 1025.204471] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.212619] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167518, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.231513] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bfbe50-8cb0-b48c-cc59-a5102d431c1c, 'name': SearchDatastore_Task, 'duration_secs': 0.009929} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.231793] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.232043] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1025.232264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.438847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.422s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.439430] env[68569]: DEBUG nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.442165] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.731s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.443550] env[68569]: INFO nova.compute.claims [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.455117] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52807d02-d983-4415-fab1-935c2e444ef5, 'name': SearchDatastore_Task, 'duration_secs': 0.009094} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.455289] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.455526] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.455788] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.455972] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1025.456188] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca6312ad-c8bf-43bd-97f5-b4ed01092071 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.458014] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-683d4590-1c5f-47f9-8bf2-871e5c5723cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.465060] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1025.465060] env[68569]: value = "task-3167519" [ 1025.465060] env[68569]: _type = "Task" [ 1025.465060] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.469098] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1025.470125] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Folder [datastore1] devstack-image-cache_base created. 
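The sequence above is the driver's image-cache fast path: under the per-image lock it searches the datastore for the cached base `.vmdk` (`SearchDatastore_Task`), then copies it into the instance's folder (`CopyVirtualDisk_Task`). The outline below sketches that decision flow; `datastore_file_exists` and `copy_virtual_disk` are stand-in callables representing those two vCenter tasks, not nova's actual `ds_util`/`vm_util` helpers.

```python
# Illustrative outline of the cache-then-copy flow shown above. The
# helper callables are stand-ins for the SearchDatastore_Task and
# CopyVirtualDisk_Task invocations, not nova's real API.
from oslo_concurrency import lockutils

def ensure_instance_disk(session, cache_path, instance_path,
                         datastore_file_exists, copy_virtual_disk):
    # Serialize on the cached image so only one build handles it at a
    # time (the lock acquire/release records above).
    with lockutils.lock(cache_path):
        if not datastore_file_exists(session, cache_path):
            raise RuntimeError('base image missing from cache: %s'
                               % cache_path)
        # Clone the cached sparse disk into the instance directory
        # (the "Copying Virtual Disk ..." record).
        copy_virtual_disk(session, cache_path, instance_path)
```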
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1025.470125] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa955656-b597-484d-8794-73960a262ee0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.475550] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167519, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.479228] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1025.479228] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529bdfe2-4c29-9aa1-63c5-c4b3998b0db7" [ 1025.479228] env[68569]: _type = "Task" [ 1025.479228] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.487404] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529bdfe2-4c29-9aa1-63c5-c4b3998b0db7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.718640] env[68569]: DEBUG oslo_vmware.api [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167518, 'name': PowerOnVM_Task, 'duration_secs': 0.461496} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.719144] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.719443] env[68569]: INFO nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Took 6.49 seconds to spawn the instance on the hypervisor. 
[ 1025.719643] env[68569]: DEBUG nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.720692] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4145ab24-ee90-467e-bc5a-0ac0fe8ff356 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.947400] env[68569]: DEBUG nova.compute.utils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.950819] env[68569]: DEBUG nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1025.975588] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167519, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465467} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.975668] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.975850] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.976110] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9016947-fa3d-4472-9767-e26438080b45 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.983602] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1025.983602] env[68569]: value = "task-3167520" [ 1025.983602] env[68569]: _type = "Task" [ 1025.983602] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.990080] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529bdfe2-4c29-9aa1-63c5-c4b3998b0db7, 'name': SearchDatastore_Task, 'duration_secs': 0.008515} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.991081] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08d0e836-5a3a-466a-a9a7-4098af95d985 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.996044] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.998878] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1025.998878] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52720d03-e85e-7078-055a-f934fa8d0f35" [ 1025.998878] env[68569]: _type = "Task" [ 1025.998878] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.006020] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52720d03-e85e-7078-055a-f934fa8d0f35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.241616] env[68569]: INFO nova.compute.manager [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Took 19.10 seconds to build instance. [ 1026.451867] env[68569]: DEBUG nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.498238] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066878} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.498238] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.498393] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d093e68-99cd-4a0e-8818-169c79a026ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.526334] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.532018] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e8cbe90-0158-42d9-888f-c6b59b3d055d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.546607] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52720d03-e85e-7078-055a-f934fa8d0f35, 'name': SearchDatastore_Task, 'duration_secs': 0.008475} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.547724] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.547724] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7b10cfb4-dc0a-4311-a24f-7a25869ef594/7b10cfb4-dc0a-4311-a24f-7a25869ef594.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1026.548254] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5906df9e-3e8f-4bc9-b548-1562d361bfd6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.553568] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1026.553568] env[68569]: value = "task-3167521" [ 1026.553568] env[68569]: _type = "Task" [ 1026.553568] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.559634] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1026.559634] env[68569]: value = "task-3167522" [ 1026.559634] env[68569]: _type = "Task" [ 1026.559634] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.565597] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167521, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.570971] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167522, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.743417] env[68569]: DEBUG oslo_concurrency.lockutils [None req-72f2f0ad-4d96-4acc-bc4e-69bd22c327b9 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.613s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.750888] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8feee95-d1eb-4a8c-9ea1-a9fd593e99ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.761568] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8938df-025b-4061-b2d7-af7cd89822b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.795522] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f5f423-a6b7-4403-89c8-164645c704c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.805555] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c217446e-18c3-497b-9706-454269f35740 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.821560] env[68569]: DEBUG nova.compute.provider_tree [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.995929] env[68569]: DEBUG nova.compute.manager [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Received event network-changed-f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1026.995929] env[68569]: DEBUG nova.compute.manager [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Refreshing instance network info cache due to event network-changed-f0211609-4c97-40c7-ba53-6f3802350533. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1026.995929] env[68569]: DEBUG oslo_concurrency.lockutils [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] Acquiring lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.995929] env[68569]: DEBUG oslo_concurrency.lockutils [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] Acquired lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.995929] env[68569]: DEBUG nova.network.neutron [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Refreshing network info cache for port f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.064682] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167521, 'name': ReconfigVM_Task, 'duration_secs': 0.397952} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.067738] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Reconfigured VM instance instance-0000005b to attach disk [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.068400] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c47f46b-5e84-4936-a833-f590fa5644b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.075202] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472301} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.076391] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7b10cfb4-dc0a-4311-a24f-7a25869ef594/7b10cfb4-dc0a-4311-a24f-7a25869ef594.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.076610] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.076919] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1027.076919] env[68569]: value = "task-3167523" [ 1027.076919] env[68569]: _type = "Task" [ 1027.076919] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.077150] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58ce3642-e421-4a4b-ac26-83cad9b4f9f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.091294] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167523, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.097020] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1027.097020] env[68569]: value = "task-3167524" [ 1027.097020] env[68569]: _type = "Task" [ 1027.097020] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.109202] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167524, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.356760] env[68569]: DEBUG nova.scheduler.client.report [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1027.356760] env[68569]: DEBUG nova.compute.provider_tree [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 140 to 141 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1027.356760] env[68569]: DEBUG nova.compute.provider_tree [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1027.470629] env[68569]: DEBUG nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.498340] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.498563] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.498717] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.498892] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.499044] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.499197] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.499397] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.499559] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.499738] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 
tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.499898] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.500249] env[68569]: DEBUG nova.virt.hardware [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.502897] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4666b865-46e9-4458-8327-5263cf5ff7d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.511576] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142967bd-2bad-4d1d-b498-fb546600f929 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.525281] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.532113] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.532113] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.532113] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ee120b2-c206-4e1b-9497-502657935dc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.548620] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.548620] env[68569]: value = "task-3167525" [ 1027.548620] env[68569]: _type = "Task" [ 1027.548620] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.559379] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167525, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.588848] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167523, 'name': Rename_Task, 'duration_secs': 0.13983} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.589133] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.589380] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-deb2a061-16bb-4280-a15e-610a78beff81 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.600186] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1027.600186] env[68569]: value = "task-3167526" [ 1027.600186] env[68569]: _type = "Task" [ 1027.600186] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.606267] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06573} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.606877] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.607746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4946cc-07bf-4f4f-9444-110adbc3d6a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.613748] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167526, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.633681] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 7b10cfb4-dc0a-4311-a24f-7a25869ef594/7b10cfb4-dc0a-4311-a24f-7a25869ef594.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.637060] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36bd1621-152b-45b9-9162-4ffebd95954e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.656683] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1027.656683] env[68569]: value = "task-3167527" [ 1027.656683] env[68569]: _type = "Task" [ 1027.656683] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.664617] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.765310] env[68569]: DEBUG nova.network.neutron [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updated VIF entry in instance network info cache for port f0211609-4c97-40c7-ba53-6f3802350533. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.765801] env[68569]: DEBUG nova.network.neutron [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating instance_info_cache with network_info: [{"id": "f0211609-4c97-40c7-ba53-6f3802350533", "address": "fa:16:3e:31:d0:5c", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0211609-4c", "ovs_interfaceid": "f0211609-4c97-40c7-ba53-6f3802350533", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.862489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.863076] env[68569]: DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.865913] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.922s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.866147] env[68569]: DEBUG nova.objects.instance [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lazy-loading 'pci_requests' on Instance uuid c0ea0ef8-93c2-416a-8caa-a51f7a39627e {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.063646] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167525, 'name': CreateVM_Task, 'duration_secs': 0.495127} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.063831] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.064284] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.064492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.064831] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.065107] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0a1acad-5e11-4042-9410-a947846cf7d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.070350] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1028.070350] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522ef94e-054c-4b42-f0c6-51b1bb14cda6" [ 1028.070350] env[68569]: _type = "Task" [ 1028.070350] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.079152] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522ef94e-054c-4b42-f0c6-51b1bb14cda6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.109696] env[68569]: DEBUG oslo_vmware.api [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167526, 'name': PowerOnVM_Task, 'duration_secs': 0.51024} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.109955] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.110272] env[68569]: INFO nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Took 6.46 seconds to spawn the instance on the hypervisor. [ 1028.110452] env[68569]: DEBUG nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.111234] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f312c5b-7a29-4fd7-8948-0517c50b58f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.166651] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167527, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.269152] env[68569]: DEBUG oslo_concurrency.lockutils [req-5723840d-ab53-48b2-af7b-d891f2a92d0b req-289a9ca2-0611-498d-bbb1-114b46370e49 service nova] Releasing lock "refresh_cache-a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.306093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.306278] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.370453] env[68569]: DEBUG nova.compute.utils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.373217] env[68569]: DEBUG nova.objects.instance [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lazy-loading 'numa_topology' on Instance uuid c0ea0ef8-93c2-416a-8caa-a51f7a39627e 
{{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.374401] env[68569]: DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.374568] env[68569]: DEBUG nova.network.neutron [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.458284] env[68569]: DEBUG nova.policy [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868bbe891585423f85374f6dffdc7813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62ff17f9dcc242e0aff061402e57bdcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.582423] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522ef94e-054c-4b42-f0c6-51b1bb14cda6, 'name': SearchDatastore_Task, 'duration_secs': 0.011957} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.583117] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.583481] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.583987] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.584297] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.584755] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.587267] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f892aa1-f212-4f87-9d18-9b80a9bedc07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.595642] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.595642] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.595642] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbf9febf-e028-4cf7-82f7-f14d669d111b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.600025] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1028.600025] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52194d5e-b9ed-ab13-3814-00fa80265f8d" [ 1028.600025] env[68569]: _type = "Task" [ 1028.600025] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.607435] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52194d5e-b9ed-ab13-3814-00fa80265f8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.628743] env[68569]: INFO nova.compute.manager [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Took 20.62 seconds to build instance. [ 1028.666703] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167527, 'name': ReconfigVM_Task, 'duration_secs': 0.646469} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.667085] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 7b10cfb4-dc0a-4311-a24f-7a25869ef594/7b10cfb4-dc0a-4311-a24f-7a25869ef594.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.667707] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e7e3ccb-cf3f-45b9-831c-fed338b5a72d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.674098] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1028.674098] env[68569]: value = "task-3167528" [ 1028.674098] env[68569]: _type = "Task" [ 1028.674098] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.682072] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167528, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.809742] env[68569]: DEBUG nova.compute.utils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.875271] env[68569]: DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.878498] env[68569]: INFO nova.compute.claims [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.009147] env[68569]: DEBUG nova.network.neutron [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Successfully created port: 642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.110485] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52194d5e-b9ed-ab13-3814-00fa80265f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.007976} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.111344] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2207e10e-09d1-4fe7-ae79-d0adf50813f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.117260] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1029.117260] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52555a19-a090-e82b-eeb6-cc8751077dd9" [ 1029.117260] env[68569]: _type = "Task" [ 1029.117260] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.125295] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52555a19-a090-e82b-eeb6-cc8751077dd9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.130874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4b20a80a-8858-47f6-8dc8-fced2145216d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a7145443-aacb-4d9e-8e39-3741d0630849" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.135s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.184173] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167528, 'name': Rename_Task, 'duration_secs': 0.460145} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.184479] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.184732] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73871380-31e8-48cf-a8c3-2eb3958dd92e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.190641] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1029.190641] env[68569]: value = "task-3167529" [ 1029.190641] env[68569]: _type = "Task" [ 1029.190641] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.198631] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167529, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.238834] env[68569]: DEBUG nova.compute.manager [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Received event network-changed-82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1029.239034] env[68569]: DEBUG nova.compute.manager [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Refreshing instance network info cache due to event network-changed-82d84faa-d446-4ed1-b1c1-65399875d3f2. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1029.239247] env[68569]: DEBUG oslo_concurrency.lockutils [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] Acquiring lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.239394] env[68569]: DEBUG oslo_concurrency.lockutils [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] Acquired lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.239541] env[68569]: DEBUG nova.network.neutron [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Refreshing network info cache for port 82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1029.314438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.629906] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52555a19-a090-e82b-eeb6-cc8751077dd9, 'name': SearchDatastore_Task, 'duration_secs': 0.009517} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.630221] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.630489] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1029.631366] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d823eaa2-2625-4b7b-bcfa-cc4715042737 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.637509] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce8d810-9354-4d08-afa0-c8ceb9fe037a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.641282] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1029.641282] env[68569]: value = "task-3167530" [ 1029.641282] env[68569]: _type = "Task" [ 1029.641282] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.647110] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81357120-16a7-4fd7-9446-0b1453f99bc6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.652952] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167530, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.681764] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04dc51a-a5e4-4cb8-854b-b0b1ff910225 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.689183] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e44189e-4316-4b59-b8c3-9103de418757 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.706084] env[68569]: DEBUG nova.compute.provider_tree [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.710604] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167529, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.892310] env[68569]: DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.929120] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.929120] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.929120] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.929120] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 
tempest-ImagesTestJSON-885804793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.929308] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.929482] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.929870] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.930245] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.932029] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.932029] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.932029] env[68569]: DEBUG nova.virt.hardware [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.932306] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6a934a-d8bd-47b9-8a90-60a65abdd097 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.941420] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c689fc-b796-4828-9cf2-48f80ca96ddc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.150692] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458367} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.150950] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.151190] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.151423] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-168f71a9-fcbd-4aaf-9a8e-3a12404bbfaf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.158084] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1030.158084] env[68569]: value = "task-3167531" [ 1030.158084] env[68569]: _type = "Task" [ 1030.158084] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.165078] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167531, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.206959] env[68569]: DEBUG oslo_vmware.api [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167529, 'name': PowerOnVM_Task, 'duration_secs': 0.724215} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.206959] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1030.206959] env[68569]: INFO nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Took 6.16 seconds to spawn the instance on the hypervisor. 
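The SearchDatastore_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same poll-until-done pattern: the API session keeps re-reading the task state, logging "progress is N%." while it runs and "completed successfully." when it finishes. The snippet below is a minimal standalone sketch of that loop, not the oslo.vmware implementation; FakeTask is a hypothetical stand-in for a vCenter task handle so the example runs without a live vCenter.

# Standalone sketch of the poll loop behind the "progress is N%." /
# "completed successfully." entries above. FakeTask is hypothetical.
import time


class FakeTask:
    """Hypothetical stand-in for a vSphere task reference."""

    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress); reports success after a few polls."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def wait_for_task(task, interval=0.5):
    """Poll a task until it succeeds or errors, printing progress lines
    shaped like the DEBUG output above."""
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"Task: {task.name} progress is {progress}%.")
            time.sleep(interval)
        elif state == "success":
            print(f"Task: {task.name} completed successfully.")
            return
        else:
            raise RuntimeError(f"Task {task.name} failed in state {state}")


if __name__ == "__main__":
    wait_for_task(FakeTask("CopyVirtualDisk_Task"))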
[ 1030.206959] env[68569]: DEBUG nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.206959] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8179d52a-0720-4c56-bb5f-a0930a0b22ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.212800] env[68569]: DEBUG nova.scheduler.client.report [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.219055] env[68569]: DEBUG nova.network.neutron [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Updated VIF entry in instance network info cache for port 82d84faa-d446-4ed1-b1c1-65399875d3f2. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1030.219158] env[68569]: DEBUG nova.network.neutron [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Updating instance_info_cache with network_info: [{"id": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "address": "fa:16:3e:e6:be:f5", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d84faa-d4", "ovs_interfaceid": "82d84faa-d446-4ed1-b1c1-65399875d3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.409127] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] 
Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.409379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.409604] env[68569]: INFO nova.compute.manager [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Attaching volume 90a6a766-5e36-48f2-84f2-a3b3be1783a7 to /dev/sdb [ 1030.441470] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546d009d-5441-44b9-989b-d173d56e415e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.448889] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b95a11-4746-41a8-bd50-9082151cc4e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.462275] env[68569]: DEBUG nova.virt.block_device [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating existing volume attachment record: 25079ea8-e5cc-41e0-90be-d5ffc00af338 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1030.536724] env[68569]: DEBUG nova.network.neutron [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Successfully updated port: 642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.667672] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167531, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077646} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.667938] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.668727] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbf8189-6900-460a-b109-3c566f2b198a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.688729] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.689027] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a64aa9ed-91a7-4f2f-b190-281b1185e215 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.709628] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1030.709628] env[68569]: value = "task-3167535" [ 1030.709628] env[68569]: _type = "Task" [ 1030.709628] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.717374] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167535, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.721350] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.855s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.729029] env[68569]: DEBUG oslo_concurrency.lockutils [req-e2f31125-9773-44a6-9d91-8f559f82cbd5 req-ad18f1c4-9a8c-44b2-8264-9a31671af24d service nova] Releasing lock "refresh_cache-a7145443-aacb-4d9e-8e39-3741d0630849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.731939] env[68569]: INFO nova.compute.manager [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Took 21.44 seconds to build instance. 
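The lockutils entries above record three moments for each named lock: "Acquiring lock", "acquired ... waited Xs", and '"released" ... held Ys'. The sketch below reproduces that pattern with plain threading as an illustrative analogue only; it is not oslo.concurrency, and the lock/owner names are just the ones visible in the log.

# Standalone sketch (plain threading, not oslo.concurrency) of the named-lock
# timing pattern seen in the 'Acquiring' / 'acquired ... waited' /
# '"released" ... held' entries above.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one lock object per lock name


@contextmanager
def named_lock(name, owner):
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    with _locks[name]:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)  # simulate the claim work being timed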
[ 1030.734199] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.448s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.734878] env[68569]: INFO nova.compute.claims [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1030.788841] env[68569]: INFO nova.network.neutron [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating port d13b7716-5b05-4896-9da9-e3674d55a3a0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1031.040936] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "refresh_cache-671f7e6d-703e-48a9-8509-2a8924afe911" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.040936] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "refresh_cache-671f7e6d-703e-48a9-8509-2a8924afe911" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.040936] env[68569]: DEBUG nova.network.neutron [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.220146] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167535, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.238881] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e8817647-601e-44bb-ba90-66973a2a26ed tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.958s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.315891] env[68569]: DEBUG nova.compute.manager [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Received event network-vif-plugged-642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1031.316125] env[68569]: DEBUG oslo_concurrency.lockutils [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] Acquiring lock "671f7e6d-703e-48a9-8509-2a8924afe911-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.316338] env[68569]: DEBUG oslo_concurrency.lockutils [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] Lock "671f7e6d-703e-48a9-8509-2a8924afe911-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.316484] env[68569]: DEBUG oslo_concurrency.lockutils [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] Lock "671f7e6d-703e-48a9-8509-2a8924afe911-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.316644] env[68569]: DEBUG nova.compute.manager [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] No waiting events found dispatching network-vif-plugged-642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.316868] env[68569]: WARNING nova.compute.manager [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Received unexpected event network-vif-plugged-642bbbb7-8eeb-4920-bfce-44e3f938dddb for instance with vm_state building and task_state spawning. [ 1031.316948] env[68569]: DEBUG nova.compute.manager [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Received event network-changed-642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1031.317402] env[68569]: DEBUG nova.compute.manager [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Refreshing instance network info cache due to event network-changed-642bbbb7-8eeb-4920-bfce-44e3f938dddb. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1031.317597] env[68569]: DEBUG oslo_concurrency.lockutils [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] Acquiring lock "refresh_cache-671f7e6d-703e-48a9-8509-2a8924afe911" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.584475] env[68569]: DEBUG nova.network.neutron [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.715259] env[68569]: DEBUG nova.network.neutron [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Updating instance_info_cache with network_info: [{"id": "642bbbb7-8eeb-4920-bfce-44e3f938dddb", "address": "fa:16:3e:f2:e2:d9", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap642bbbb7-8e", "ovs_interfaceid": "642bbbb7-8eeb-4920-bfce-44e3f938dddb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.721669] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167535, 'name': ReconfigVM_Task, 'duration_secs': 0.586664} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.721932] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.722541] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68058d86-b1da-402e-9087-2063e1ba163e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.729717] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1031.729717] env[68569]: value = "task-3167536" [ 1031.729717] env[68569]: _type = "Task" [ 1031.729717] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.737595] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167536, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.995625] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff830df-099a-4d9c-b58b-952ae64e7367 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.003364] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e76225f-76d8-431d-bba9-770c73b7d798 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.033781] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6be601f-be4d-4f13-bc97-2ccc95c8c641 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.041534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c7516d-60e4-4e7b-b84f-e9cef5bfba27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.054808] env[68569]: DEBUG nova.compute.provider_tree [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.217641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "refresh_cache-671f7e6d-703e-48a9-8509-2a8924afe911" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.217990] env[68569]: 
DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Instance network_info: |[{"id": "642bbbb7-8eeb-4920-bfce-44e3f938dddb", "address": "fa:16:3e:f2:e2:d9", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap642bbbb7-8e", "ovs_interfaceid": "642bbbb7-8eeb-4920-bfce-44e3f938dddb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.218345] env[68569]: DEBUG oslo_concurrency.lockutils [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] Acquired lock "refresh_cache-671f7e6d-703e-48a9-8509-2a8924afe911" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.218572] env[68569]: DEBUG nova.network.neutron [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Refreshing network info cache for port 642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.219799] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:e2:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '642bbbb7-8eeb-4920-bfce-44e3f938dddb', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.228179] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.231526] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.232042] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a5b0193-d116-45c4-b643-61c090aab4da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.254505] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167536, 'name': Rename_Task, 'duration_secs': 0.455691} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.255722] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.255944] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.255944] env[68569]: value = "task-3167537" [ 1032.255944] env[68569]: _type = "Task" [ 1032.255944] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.256125] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a592d5d1-a9e4-4444-bc7e-349dd6b1c297 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.265758] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167537, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.266876] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1032.266876] env[68569]: value = "task-3167538" [ 1032.266876] env[68569]: _type = "Task" [ 1032.266876] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.274212] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.470602] env[68569]: DEBUG nova.network.neutron [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Updated VIF entry in instance network info cache for port 642bbbb7-8eeb-4920-bfce-44e3f938dddb. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.471259] env[68569]: DEBUG nova.network.neutron [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Updating instance_info_cache with network_info: [{"id": "642bbbb7-8eeb-4920-bfce-44e3f938dddb", "address": "fa:16:3e:f2:e2:d9", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap642bbbb7-8e", "ovs_interfaceid": "642bbbb7-8eeb-4920-bfce-44e3f938dddb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.558017] env[68569]: DEBUG nova.scheduler.client.report [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.625067] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.625436] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.625752] env[68569]: DEBUG nova.network.neutron [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.767797] env[68569]: DEBUG oslo_vmware.api [-] Task: 
{'id': task-3167537, 'name': CreateVM_Task, 'duration_secs': 0.29698} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.770785] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.771591] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.771764] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.772126] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1032.772745] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fae2f2a-17f0-4ab5-8003-3380127253c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.778063] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167538, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.779396] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1032.779396] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5264a8fb-86c1-c012-7909-5834f52129cf" [ 1032.779396] env[68569]: _type = "Task" [ 1032.779396] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.786770] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5264a8fb-86c1-c012-7909-5834f52129cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.974728] env[68569]: DEBUG oslo_concurrency.lockutils [req-1bcf8b6b-7c9d-463d-a912-8921b59022bc req-e3f21994-0c3e-444c-a45e-d074108564d4 service nova] Releasing lock "refresh_cache-671f7e6d-703e-48a9-8509-2a8924afe911" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.064030] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.331s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.064537] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1033.070683] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.139s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.070929] env[68569]: DEBUG nova.objects.instance [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lazy-loading 'resources' on Instance uuid ae92919c-f2eb-4231-afea-b23269e09a0a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.276997] env[68569]: DEBUG oslo_vmware.api [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167538, 'name': PowerOnVM_Task, 'duration_secs': 0.654792} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.277261] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.277463] env[68569]: INFO nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Took 5.81 seconds to spawn the instance on the hypervisor. 
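The instance_info_cache updates above embed the full network_info blob as JSON. When reading these logs it is often enough to pull out the port id, MAC, device name and fixed IPs; the sketch below does that with the standard json module. The sample blob is a trimmed copy of the cached entry for port 642bbbb7-8eeb-4920-bfce-44e3f938dddb and is illustrative only.

# Sketch: extract the useful fields from a network_info blob like the ones
# logged above. The sample is trimmed from the log and is illustrative only.
import json

network_info_json = """
[{"id": "642bbbb7-8eeb-4920-bfce-44e3f938dddb",
  "address": "fa:16:3e:f2:e2:d9",
  "devname": "tap642bbbb7-8e",
  "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.8",
                                    "type": "fixed",
                                    "floating_ips": []}]}]}}]
"""

for vif in json.loads(network_info_json):
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip.get("type") == "fixed"]
    print(f'port {vif["id"]} mac {vif["address"]} '
          f'dev {vif["devname"]} ips {fixed_ips}')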
[ 1033.277638] env[68569]: DEBUG nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.278422] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d040be0-dc23-4dc3-9da5-74c5b912a115 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.294717] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5264a8fb-86c1-c012-7909-5834f52129cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009467} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.295223] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.295449] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.295676] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.295824] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.296011] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.296267] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccd16d02-f9f3-4d4f-a9c0-f37fabc589e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.304811] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.304996] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.305789] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98a3607e-2d3d-4a69-a9ea-052ee877d7a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.311258] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1033.311258] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bd915e-20d3-93b7-b71f-5bbda99d885f" [ 1033.311258] env[68569]: _type = "Task" [ 1033.311258] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.318615] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bd915e-20d3-93b7-b71f-5bbda99d885f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.319788] env[68569]: DEBUG nova.network.neutron [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.344388] env[68569]: DEBUG nova.compute.manager [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-vif-plugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1033.344600] 
env[68569]: DEBUG oslo_concurrency.lockutils [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.344813] env[68569]: DEBUG oslo_concurrency.lockutils [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.344979] env[68569]: DEBUG oslo_concurrency.lockutils [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.345165] env[68569]: DEBUG nova.compute.manager [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] No waiting events found dispatching network-vif-plugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1033.345331] env[68569]: WARNING nova.compute.manager [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received unexpected event network-vif-plugged-d13b7716-5b05-4896-9da9-e3674d55a3a0 for instance with vm_state shelved_offloaded and task_state spawning. [ 1033.345501] env[68569]: DEBUG nova.compute.manager [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-changed-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1033.345655] env[68569]: DEBUG nova.compute.manager [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Refreshing instance network info cache due to event network-changed-d13b7716-5b05-4896-9da9-e3674d55a3a0. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1033.345819] env[68569]: DEBUG oslo_concurrency.lockutils [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.572266] env[68569]: DEBUG nova.compute.utils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1033.573677] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1033.574393] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1033.630463] env[68569]: DEBUG nova.policy [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69f4475c53cd434cbad10ee97213193d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83427ebdc6364f7887409f7bfc35f025', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1033.804327] env[68569]: INFO nova.compute.manager [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Took 24.16 seconds to build instance. [ 1033.828072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.829928] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bd915e-20d3-93b7-b71f-5bbda99d885f, 'name': SearchDatastore_Task, 'duration_secs': 0.010758} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.832824] env[68569]: DEBUG oslo_concurrency.lockutils [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.832824] env[68569]: DEBUG nova.network.neutron [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Refreshing network info cache for port d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.833710] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dd6d4b-fa12-469b-9a47-f7779f41203e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.836431] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68ed80f-d800-4b8d-84a5-0d1b057bab73 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.844564] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1033.844564] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528a2d42-8d2f-77ad-dd73-9bdb852f4aba" [ 1033.844564] env[68569]: _type = "Task" [ 1033.844564] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.845482] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607cb86c-0260-4a6f-83f7-a0797960cb78 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.858294] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528a2d42-8d2f-77ad-dd73-9bdb852f4aba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.898801] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='060f6a1e7cf5ccee7871fb5f1fa12046',container_format='bare',created_at=2025-03-26T04:57:08Z,direct_url=,disk_format='vmdk',id=f1fc5b9e-4d0a-4b68-b39b-3d0785190c06,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-69829400-shelved',owner='8b924e47d91a488e9d912a5369792aa5',properties=ImageMetaProps,protected=,size=31664640,status='active',tags=,updated_at=2025-03-26T04:57:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.900466] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.900466] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.900466] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.900466] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.900640] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.900818] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.901011] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1033.901199] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.901555] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.901892] env[68569]: DEBUG nova.virt.hardware [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.903598] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4b293f-b143-4191-9f6a-b60b558cc0db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.910146] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a4a109-6ad9-45fb-9e57-d47646db4a84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.920072] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edd3357-82a0-454c-a38e-4c3d1b283f88 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.923665] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35167175-6cb3-4e4a-9735-5534789070da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.937335] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:61:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd13b7716-5b05-4896-9da9-e3674d55a3a0', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.944597] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.952196] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.952586] env[68569]: DEBUG nova.compute.provider_tree [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.954411] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Successfully created port: 2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.955765] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-312684cd-dc0d-417c-b277-1fb378b98c8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.970935] env[68569]: DEBUG nova.scheduler.client.report [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.980875] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.980875] env[68569]: value = "task-3167540" [ 1033.980875] env[68569]: _type = "Task" [ 1033.980875] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.989581] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167540, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.081320] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1034.300018] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Successfully created port: ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1034.305783] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6a9664d8-93dc-45bb-ab02-8bb2f4429e37 tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.672s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.358433] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528a2d42-8d2f-77ad-dd73-9bdb852f4aba, 'name': SearchDatastore_Task, 'duration_secs': 0.02015} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.358667] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.358916] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 671f7e6d-703e-48a9-8509-2a8924afe911/671f7e6d-703e-48a9-8509-2a8924afe911.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.359189] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f99c3dea-7c67-40cf-bc94-642ed279dfff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.365256] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1034.365256] env[68569]: value = "task-3167541" [ 1034.365256] env[68569]: _type = "Task" [ 1034.365256] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.375823] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167541, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.477935] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.407s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.481945] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.685s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.482260] env[68569]: DEBUG nova.objects.instance [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid 705bbc03-30ac-4d5b-a3f0-6505171a69fc {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.495761] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167540, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.512584] env[68569]: INFO nova.scheduler.client.report [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted allocations for instance ae92919c-f2eb-4231-afea-b23269e09a0a [ 1034.561111] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Successfully created port: 5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1034.575991] env[68569]: DEBUG nova.network.neutron [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updated VIF entry in instance network info cache for port d13b7716-5b05-4896-9da9-e3674d55a3a0. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.575991] env[68569]: DEBUG nova.network.neutron [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.727023] env[68569]: INFO nova.compute.manager [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Rebuilding instance [ 1034.789053] env[68569]: DEBUG nova.compute.manager [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.790203] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35718217-f0e8-4a0e-a0d6-cbe0c6507fd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.879432] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167541, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.997024] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167540, 'name': CreateVM_Task, 'duration_secs': 0.612026} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.997279] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.998061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.998061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.998429] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1034.998965] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1c1bdc7-8a7a-4c26-a123-3516546a7c14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.003196] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1035.003196] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a35b39-ced9-5cdf-0ead-a370da3912a8" [ 1035.003196] env[68569]: _type = "Task" [ 1035.003196] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.008155] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1035.008372] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633684', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'name': 'volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948', 'attached_at': '', 'detached_at': '', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'serial': '90a6a766-5e36-48f2-84f2-a3b3be1783a7'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1035.009156] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6c828a-3e95-4103-b4a7-cf8f93ebbc4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.014412] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a35b39-ced9-5cdf-0ead-a370da3912a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.033317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-be3c953b-1136-4cd6-acbc-1f66d011d012 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "ae92919c-f2eb-4231-afea-b23269e09a0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.009s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.034733] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f328527d-768b-43e7-8cb6-c62d139f3f4a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.059945] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7/volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.063159] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c459f74-3ccf-4b02-ab63-a1c8fdd33bf8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.077739] env[68569]: DEBUG oslo_concurrency.lockutils [req-8eaebfda-c62f-4def-b9b6-50f2c195e44c req-08952c21-5094-45e6-99ac-c47d02891d45 service nova] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
1035.080902] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1035.080902] env[68569]: value = "task-3167542" [ 1035.080902] env[68569]: _type = "Task" [ 1035.080902] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.090539] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167542, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.092686] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1035.113864] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1035.114134] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.114318] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1035.114471] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.114976] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1035.114976] env[68569]: DEBUG nova.virt.hardware [None 
req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1035.114976] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1035.115122] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1035.115291] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1035.115454] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1035.115626] env[68569]: DEBUG nova.virt.hardware [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1035.116556] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c20844-e13d-487a-8620-68ab29366651 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.126378] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca34d3d-2bbc-44a9-b61d-ff2e6c0516f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.256086] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec713323-4bb7-45da-91c6-6874c6f5a482 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.264513] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b94d95-2118-49af-9392-76058ccc5a76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.297396] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad40fe8f-c9b5-454a-8014-a27e57febf82 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.305495] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9692f0c2-c083-4b46-90f6-154d13173c17 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.320042] env[68569]: DEBUG nova.compute.provider_tree [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.378283] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538434} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.379032] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 671f7e6d-703e-48a9-8509-2a8924afe911/671f7e6d-703e-48a9-8509-2a8924afe911.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.379032] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.379241] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3cdaf53-01fc-4a5b-8e7b-b27ffbef6522 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.387854] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1035.387854] env[68569]: value = "task-3167543" [ 1035.387854] env[68569]: _type = "Task" [ 1035.387854] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.396618] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.515956] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.516278] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Processing image f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.516583] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.516778] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.517015] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.517322] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e02c31f7-1e96-41ec-8940-a4eacd18316e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.534043] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.534273] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.535292] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b6e5e7d-1882-47bf-bc5d-845227ae97b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.541045] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1035.541045] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52968dbd-0d67-378d-b6d4-63f7da488456" [ 1035.541045] env[68569]: _type = "Task" [ 1035.541045] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.549961] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52968dbd-0d67-378d-b6d4-63f7da488456, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.589697] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167542, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.805697] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.806067] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b6affe9-5317-4655-8f83-e458bc65449c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.813679] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1035.813679] env[68569]: value = "task-3167544" [ 1035.813679] env[68569]: _type = "Task" [ 1035.813679] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.822917] env[68569]: DEBUG nova.scheduler.client.report [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.825806] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.897820] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074225} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.898249] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.899123] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501fe3be-c1c7-4d1b-ab4a-cc420e10e6b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.921368] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 671f7e6d-703e-48a9-8509-2a8924afe911/671f7e6d-703e-48a9-8509-2a8924afe911.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.921662] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3d027da-54c9-47cd-a835-69b343a6aff9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.944407] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1035.944407] env[68569]: value = "task-3167545" [ 1035.944407] env[68569]: _type = "Task" [ 1035.944407] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.953896] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167545, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.049513] env[68569]: DEBUG nova.compute.manager [req-80082428-86fa-48ff-b9c7-0b0bd137ac71 req-cc4f6965-13d5-4824-b2c1-21815c3ceff7 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-vif-plugged-2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1036.049962] env[68569]: DEBUG oslo_concurrency.lockutils [req-80082428-86fa-48ff-b9c7-0b0bd137ac71 req-cc4f6965-13d5-4824-b2c1-21815c3ceff7 service nova] Acquiring lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.050038] env[68569]: DEBUG oslo_concurrency.lockutils [req-80082428-86fa-48ff-b9c7-0b0bd137ac71 req-cc4f6965-13d5-4824-b2c1-21815c3ceff7 service nova] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.050203] env[68569]: DEBUG oslo_concurrency.lockutils [req-80082428-86fa-48ff-b9c7-0b0bd137ac71 req-cc4f6965-13d5-4824-b2c1-21815c3ceff7 service nova] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.050539] env[68569]: DEBUG nova.compute.manager [req-80082428-86fa-48ff-b9c7-0b0bd137ac71 req-cc4f6965-13d5-4824-b2c1-21815c3ceff7 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] No waiting events found dispatching network-vif-plugged-2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1036.050612] env[68569]: WARNING nova.compute.manager [req-80082428-86fa-48ff-b9c7-0b0bd137ac71 req-cc4f6965-13d5-4824-b2c1-21815c3ceff7 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received unexpected event network-vif-plugged-2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f for instance with vm_state building and task_state spawning. 
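The "Acquiring lock ... / acquired by ... / released by ..." entries throughout this trace are emitted by oslo.concurrency's lockutils helpers. Below is a minimal, self-contained sketch of that pattern, assuming only the public lockutils API; the lock names and the decorated function are illustrative placeholders, not the actual Nova callers shown in the log above.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs only while the named in-process lock is held; lockutils logs the
    # DEBUG 'acquired by ... :: waited N.NNNs' and '"released" by ... :: held
    # N.NNNs' messages around this body, like the lines in the trace above.
    pass

update_usage()

# The 'Acquiring lock "refresh_cache-<uuid>"' entries use the context-manager
# form of the same helper (placeholder lock name below):
with lockutils.lock('refresh_cache-example'):
    pass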
[ 1036.057597] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1036.057597] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Fetch image to [datastore2] OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0/OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1036.057984] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Downloading stream optimized image f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 to [datastore2] OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0/OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0.vmdk on the data store datastore2 as vApp {{(pid=68569) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1036.057984] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Downloading image file data f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 to the ESX as VM named 'OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0' {{(pid=68569) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1036.117031] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167542, 'name': ReconfigVM_Task, 'duration_secs': 0.953045} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.117031] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Reconfigured VM instance instance-00000054 to attach disk [datastore2] volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7/volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.119679] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b1e5d29-819b-4f49-8e1c-a349fcb12adc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.137527] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Successfully updated port: 2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1036.146745] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1036.146745] env[68569]: value = "task-3167546" [ 1036.146745] env[68569]: _type = "Task" [ 1036.146745] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.157296] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167546, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.163195] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1036.163195] env[68569]: value = "resgroup-9" [ 1036.163195] env[68569]: _type = "ResourcePool" [ 1036.163195] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1036.163452] env[68569]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4a9b3b10-eae6-4b2f-b9f8-cca4d1227dab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.186161] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lease: (returnval){ [ 1036.186161] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b0b690-d545-5846-ce53-2e63b87246c8" [ 1036.186161] env[68569]: _type = "HttpNfcLease" [ 1036.186161] env[68569]: } obtained for vApp import into resource pool (val){ [ 1036.186161] env[68569]: value = "resgroup-9" [ 1036.186161] env[68569]: _type = "ResourcePool" [ 1036.186161] env[68569]: }. 
{{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1036.186616] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the lease: (returnval){ [ 1036.186616] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b0b690-d545-5846-ce53-2e63b87246c8" [ 1036.186616] env[68569]: _type = "HttpNfcLease" [ 1036.186616] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1036.193986] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.193986] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b0b690-d545-5846-ce53-2e63b87246c8" [ 1036.193986] env[68569]: _type = "HttpNfcLease" [ 1036.193986] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1036.324024] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.326834] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.329733] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.068s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.355388] env[68569]: INFO nova.scheduler.client.report [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance 705bbc03-30ac-4d5b-a3f0-6505171a69fc [ 1036.463305] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167545, 'name': ReconfigVM_Task, 'duration_secs': 0.278425} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.466391] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 671f7e6d-703e-48a9-8509-2a8924afe911/671f7e6d-703e-48a9-8509-2a8924afe911.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.467424] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6194d6e2-d9ec-484f-8387-466b157acb85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.473995] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1036.473995] env[68569]: value = "task-3167548" [ 1036.473995] env[68569]: _type = "Task" [ 1036.473995] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.520841] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-6824efd5-427b-420d-83d5-a1d5acd94bf9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.521152] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6824efd5-427b-420d-83d5-a1d5acd94bf9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.521516] env[68569]: DEBUG nova.objects.instance [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'flavor' on Instance uuid 6824efd5-427b-420d-83d5-a1d5acd94bf9 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.604242] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570a3711-2c43-46c7-a90c-d4899b909508 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.612358] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c692cc9-f3db-495b-b72c-ba7f85ef63ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.646442] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee9298f-fadf-4e85-b0e7-1437b31aefc7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.660577] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a903bd7b-2e12-49fa-8ee0-25cadea45f48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.664647] env[68569]: DEBUG oslo_vmware.api [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167546, 'name': ReconfigVM_Task, 'duration_secs': 0.158386} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.665244] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633684', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'name': 'volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948', 'attached_at': '', 'detached_at': '', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'serial': '90a6a766-5e36-48f2-84f2-a3b3be1783a7'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1036.676314] env[68569]: DEBUG nova.compute.provider_tree [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.694112] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.694112] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b0b690-d545-5846-ce53-2e63b87246c8" [ 1036.694112] env[68569]: _type = "HttpNfcLease" [ 1036.694112] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1036.827954] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167544, 'name': PowerOffVM_Task, 'duration_secs': 0.660982} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.828377] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.828733] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.829896] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47e1ad8-2d77-48b4-9d5c-d101fc4ac460 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.836573] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.836839] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f548c3db-0f81-4406-bd16-1e09780134fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.862880] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.863122] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.863310] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleting the datastore file [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.865667] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-268023e6-1bfa-4e66-9d5f-88467a905c1a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.868123] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9d439759-fbb9-4d36-a465-492aa037c7f0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "705bbc03-30ac-4d5b-a3f0-6505171a69fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.454s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.874220] env[68569]: DEBUG oslo_vmware.api [None 
req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1036.874220] env[68569]: value = "task-3167550" [ 1036.874220] env[68569]: _type = "Task" [ 1036.874220] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.881492] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167550, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.984144] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167548, 'name': Rename_Task, 'duration_secs': 0.155441} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.984440] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1036.984685] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4397197-ff26-4428-a1b4-604ecbb8a7b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.990163] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1036.990163] env[68569]: value = "task-3167551" [ 1036.990163] env[68569]: _type = "Task" [ 1036.990163] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.997557] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167551, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.090340] env[68569]: DEBUG nova.objects.instance [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'pci_requests' on Instance uuid 6824efd5-427b-420d-83d5-a1d5acd94bf9 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.179772] env[68569]: DEBUG nova.scheduler.client.report [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.195837] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1037.195837] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b0b690-d545-5846-ce53-2e63b87246c8" [ 1037.195837] env[68569]: _type = "HttpNfcLease" [ 1037.195837] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1037.196013] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1037.196013] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b0b690-d545-5846-ce53-2e63b87246c8" [ 1037.196013] env[68569]: _type = "HttpNfcLease" [ 1037.196013] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1037.197763] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8cb0cfa-32be-432f-b9b0-5cc2882037c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.205306] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52512aa9-0db1-cd34-0253-05368d8392d5/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1037.205611] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating HTTP connection to write to file with size = 31664640 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52512aa9-0db1-cd34-0253-05368d8392d5/disk-0.vmdk. 
{{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1037.273357] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5e2966c5-8ae7-4bd7-a1ea-ecd13e7aa7c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.384042] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096124} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.384042] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.384042] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.384255] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.500404] env[68569]: DEBUG oslo_vmware.api [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167551, 'name': PowerOnVM_Task, 'duration_secs': 0.479641} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.500673] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.500917] env[68569]: INFO nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Took 7.61 seconds to spawn the instance on the hypervisor. 
[ 1037.501136] env[68569]: DEBUG nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.501892] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c757675-9cee-4556-bffb-c67297aa4cd1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.592921] env[68569]: DEBUG nova.objects.base [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Object Instance<6824efd5-427b-420d-83d5-a1d5acd94bf9> lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1037.593291] env[68569]: DEBUG nova.network.neutron [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.646554] env[68569]: DEBUG nova.policy [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.686297] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.357s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.686593] env[68569]: INFO nova.compute.manager [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Successfully reverted task state from resize_migrating on failure for instance. 
[ 1037.697353] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.544s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.699040] env[68569]: INFO nova.compute.claims [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server [None req-a95b707f-4dcd-4cf0-9a4c-67ab7c8ea36d tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1037.702799] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1037.703273] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1037.703665] env[68569]: ERROR oslo_messaging.rpc.server [ 1037.712701] env[68569]: DEBUG nova.objects.instance [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.754021] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.754380] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.923213] env[68569]: DEBUG nova.network.neutron [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Successfully created port: ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.020857] env[68569]: INFO nova.compute.manager [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Took 24.32 seconds to build instance. [ 1038.086638] env[68569]: DEBUG nova.compute.manager [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-changed-2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1038.086901] env[68569]: DEBUG nova.compute.manager [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Refreshing instance network info cache due to event network-changed-2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1038.087484] env[68569]: DEBUG oslo_concurrency.lockutils [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] Acquiring lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.087734] env[68569]: DEBUG oslo_concurrency.lockutils [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] Acquired lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.088008] env[68569]: DEBUG nova.network.neutron [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Refreshing network info cache for port 2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1038.179905] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Successfully updated port: ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1038.218723] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c76446ed-2f96-440f-8795-cc3a88a480c2 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.809s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.259897] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1038.423924] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.424254] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.424520] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.424761] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.424945] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.425207] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.425520] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.425741] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.426031] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d 
tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.426289] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.426536] env[68569]: DEBUG nova.virt.hardware [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.428127] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1090a920-f0ee-4be9-9f97-ce8a9542f885 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.444769] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fb60ac-d94d-4543-a8e7-4c94c0b874ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.460304] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.467173] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.470055] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.470369] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc3d0ac9-9ee9-4c5d-8cb3-53c5d7d7a60f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.492658] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.492658] env[68569]: value = "task-3167552" [ 1038.492658] env[68569]: _type = "Task" [ 1038.492658] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.504154] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.523523] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c0f08bc4-e0a7-4e4c-90c1-7e2bba9904e9 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "671f7e6d-703e-48a9-8509-2a8924afe911" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.840s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.642852] env[68569]: DEBUG nova.network.neutron [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.665640] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Completed reading data from the image iterator. {{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1038.666083] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52512aa9-0db1-cd34-0253-05368d8392d5/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1038.667164] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813b31c7-cb20-4f60-ba92-564380eea832 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.675277] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52512aa9-0db1-cd34-0253-05368d8392d5/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1038.675563] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52512aa9-0db1-cd34-0253-05368d8392d5/disk-0.vmdk. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1038.675805] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-79a69cbc-5482-4f3f-b473-d1cbb55dcfa1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.773090] env[68569]: DEBUG nova.network.neutron [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.782813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.918464] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.922023] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.922023] env[68569]: DEBUG nova.compute.manager [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.923378] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d206b2-14f7-45da-bd00-006bb412bdee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.931039] env[68569]: DEBUG nova.compute.manager [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1038.931600] env[68569]: DEBUG nova.objects.instance [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1038.988335] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-63c06399-f108-404b-8e99-65d0492c2ef4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.002283] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50b964d-b69b-4156-8b39-d6f73cc8d59b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.009300] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.040383] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ec6f88-f57f-4a55-834b-897ea5bdaa5f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.045725] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52512aa9-0db1-cd34-0253-05368d8392d5/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1039.046060] env[68569]: INFO nova.virt.vmwareapi.images [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Downloaded image file data f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 [ 1039.048643] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba6393a-1ea8-4c1f-bfa2-795ec573480a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.052291] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302b51b5-af94-4dca-9c46-24513f01d3a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.078351] env[68569]: DEBUG nova.compute.provider_tree [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.080085] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94184ce9-d488-4c8c-96e5-1c2f94c810b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.114485] env[68569]: INFO nova.virt.vmwareapi.images [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] The imported VM was unregistered [ 1039.116927] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1039.117154] 
env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.117417] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efd837fd-f013-4fc9-87b2-96dd9350c6a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.147677] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Created directory with path [datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.147908] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0/OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0.vmdk to [datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk. {{(pid=68569) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1039.148191] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6731bb11-735f-4aac-8ec3-cf7a4e5d66ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.155269] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1039.155269] env[68569]: value = "task-3167554" [ 1039.155269] env[68569]: _type = "Task" [ 1039.155269] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.162612] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.276113] env[68569]: DEBUG oslo_concurrency.lockutils [req-9c886700-a3dd-44a2-b4f5-e05d0cb9db37 req-b66f2875-2466-4b23-91f5-ac6487053eac service nova] Releasing lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.407058] env[68569]: DEBUG nova.network.neutron [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Successfully updated port: ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.503157] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.531555] env[68569]: DEBUG nova.compute.manager [req-4c54f6ff-8cb6-40e7-99ae-3ce5e39114a6 req-4f71fb6b-0701-40f4-96d0-7c98388ee5b3 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-vif-plugged-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1039.531859] env[68569]: DEBUG oslo_concurrency.lockutils [req-4c54f6ff-8cb6-40e7-99ae-3ce5e39114a6 req-4f71fb6b-0701-40f4-96d0-7c98388ee5b3 service nova] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.532180] env[68569]: DEBUG oslo_concurrency.lockutils [req-4c54f6ff-8cb6-40e7-99ae-3ce5e39114a6 req-4f71fb6b-0701-40f4-96d0-7c98388ee5b3 service nova] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.532417] env[68569]: DEBUG oslo_concurrency.lockutils [req-4c54f6ff-8cb6-40e7-99ae-3ce5e39114a6 req-4f71fb6b-0701-40f4-96d0-7c98388ee5b3 service nova] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.532633] env[68569]: DEBUG nova.compute.manager [req-4c54f6ff-8cb6-40e7-99ae-3ce5e39114a6 req-4f71fb6b-0701-40f4-96d0-7c98388ee5b3 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] No waiting events found dispatching network-vif-plugged-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1039.532887] env[68569]: WARNING nova.compute.manager [req-4c54f6ff-8cb6-40e7-99ae-3ce5e39114a6 req-4f71fb6b-0701-40f4-96d0-7c98388ee5b3 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received unexpected event network-vif-plugged-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 for instance with vm_state active and task_state None. [ 1039.583241] env[68569]: DEBUG nova.scheduler.client.report [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.665622] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.910183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.910391] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.910573] env[68569]: DEBUG nova.network.neutron [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.928132] env[68569]: DEBUG nova.compute.manager [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.929348] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06f40c1-b30d-4e9a-b412-1c20a4f1998c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.938421] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.941094] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e3cf7f3-1039-4df1-84a7-0ea655cefe04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.950404] env[68569]: DEBUG oslo_vmware.api [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1039.950404] env[68569]: value = "task-3167555" [ 1039.950404] env[68569]: _type = "Task" [ 1039.950404] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.962202] env[68569]: DEBUG oslo_vmware.api [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.004943] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.089089] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.089908] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1040.093462] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.302s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.093801] env[68569]: DEBUG nova.objects.instance [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'resources' on Instance uuid 1689f1f3-53f2-4c02-a969-e4dae21f14b7 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.167120] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.224134] env[68569]: DEBUG nova.compute.manager [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-vif-plugged-ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1040.224354] env[68569]: DEBUG oslo_concurrency.lockutils [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] Acquiring lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.224566] env[68569]: DEBUG oslo_concurrency.lockutils [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.224785] env[68569]: DEBUG oslo_concurrency.lockutils [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.224980] env[68569]: DEBUG nova.compute.manager [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] No waiting events found dispatching network-vif-plugged-ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1040.225308] env[68569]: WARNING nova.compute.manager [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received unexpected event network-vif-plugged-ea020883-6378-448b-acd5-bd2f12b42b10 for instance with vm_state building and task_state spawning. [ 1040.225503] env[68569]: DEBUG nova.compute.manager [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-changed-ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1040.225734] env[68569]: DEBUG nova.compute.manager [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Refreshing instance network info cache due to event network-changed-ea020883-6378-448b-acd5-bd2f12b42b10. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1040.226032] env[68569]: DEBUG oslo_concurrency.lockutils [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] Acquiring lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.226212] env[68569]: DEBUG oslo_concurrency.lockutils [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] Acquired lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.226410] env[68569]: DEBUG nova.network.neutron [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Refreshing network info cache for port ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1040.331046] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Successfully updated port: 5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1040.445098] env[68569]: INFO nova.compute.manager [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] instance snapshotting [ 1040.451741] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11275d98-45a1-4c4e-8d10-f319ddfba3d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.465578] env[68569]: WARNING nova.network.neutron [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] 01780a40-9441-415d-988a-24afe70ba382 already exists in list: networks containing: ['01780a40-9441-415d-988a-24afe70ba382']. ignoring it [ 1040.500162] env[68569]: DEBUG oslo_vmware.api [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.507514] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958e0d38-8f4b-4955-975c-864a4a357421 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.519973] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.597570] env[68569]: DEBUG nova.compute.utils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1040.605092] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1040.605286] env[68569]: DEBUG nova.network.neutron [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1040.658978] env[68569]: DEBUG nova.policy [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afebab35cda9438781e2b466ce586405', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc82d358e214a959ae6b34c33344b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1040.673675] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.808285] env[68569]: DEBUG nova.network.neutron [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.838952] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.973500] env[68569]: DEBUG oslo_vmware.api [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167555, 'name': PowerOffVM_Task, 'duration_secs': 0.695316} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.977943] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.978528] env[68569]: DEBUG nova.compute.manager [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.981037] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce8019f-e1b8-439a-9439-1262b09b3d0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.997033] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57afb9f9-4c8e-48cb-a20d-352703e02117 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.006952] env[68569]: DEBUG nova.network.neutron [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.012398] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a738f14a-5f9d-4f18-b97b-97e262ece613 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.016816] env[68569]: DEBUG nova.network.neutron [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Successfully created port: 5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1041.023877] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.029463] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1041.030991] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a2c92d31-aca0-4af0-8a6b-768dd25c880c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.064657] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0c9831-90ed-4a3e-95fc-8fbac7601ce5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.070698] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1041.070698] env[68569]: value = "task-3167556" [ 1041.070698] env[68569]: _type = "Task" [ 1041.070698] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.083011] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea672dae-f3dd-451a-a16a-ba633abab7e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.087542] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167556, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.100047] env[68569]: DEBUG nova.compute.provider_tree [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.103059] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1041.119342] env[68569]: DEBUG nova.network.neutron [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11", "address": "fa:16:3e:2b:3f:98", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad2700ea-e5", "ovs_interfaceid": "ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.167554] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.506260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea26067b-29bc-473b-8356-e79aa3f19f2d tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.587s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.512853] env[68569]: DEBUG oslo_concurrency.lockutils [req-689fa10a-93d3-4349-a2a7-7d176ca4e0d9 req-e71e6c44-8436-49f9-99e2-4ec2379d524a service nova] Releasing lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.513332] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.513582] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.513741] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1041.583058] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167556, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.607067] env[68569]: DEBUG nova.scheduler.client.report [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.622841] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.623769] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.623970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.624861] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1622d6-b971-4fb9-af13-b1c579f98ad1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.643734] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.643993] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.644179] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 
tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.644353] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.644499] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.644642] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.644860] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.645020] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.645270] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.645340] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.645508] env[68569]: DEBUG nova.virt.hardware [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.652214] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Reconfiguring VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1041.653354] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-880dcacb-305c-45ea-a7bb-441b16f78651 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.678586] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.680843] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1041.680843] env[68569]: value = "task-3167557" [ 1041.680843] env[68569]: _type = "Task" [ 1041.680843] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.687483] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167557, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.815617] env[68569]: DEBUG nova.compute.manager [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-changed-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1041.815869] env[68569]: DEBUG nova.compute.manager [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Refreshing instance network info cache due to event network-changed-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1041.816182] env[68569]: DEBUG oslo_concurrency.lockutils [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] Acquiring lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.816334] env[68569]: DEBUG oslo_concurrency.lockutils [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] Acquired lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.816499] env[68569]: DEBUG nova.network.neutron [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Refreshing network info cache for port ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.915647] env[68569]: DEBUG nova.objects.instance [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.012583] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167552, 'name': CreateVM_Task, 'duration_secs': 3.295391} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.012741] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1042.013157] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.013323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.013658] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1042.013936] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d5460b-7c3e-4052-b133-fd09388ddaea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.020996] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1042.020996] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b87b55-f45c-e208-2840-0611bfb27464" [ 1042.020996] env[68569]: _type = "Task" [ 1042.020996] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.029385] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b87b55-f45c-e208-2840-0611bfb27464, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.077070] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1042.085086] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167556, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.117188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.024s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.121188] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1042.126934] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.905s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.127241] env[68569]: DEBUG nova.objects.instance [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lazy-loading 'resources' on Instance uuid 1670f03a-94e2-4005-be7e-41aad61a8925 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.145419] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1042.145829] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.145829] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1042.146009] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 
tempest-ServerDiskConfigTestJSON-1172398095-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.146166] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1042.146312] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1042.146543] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1042.146663] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1042.146836] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1042.146982] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1042.147163] env[68569]: DEBUG nova.virt.hardware [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1042.148532] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb175eef-3e63-42ab-be53-11b79bc4e6bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.152553] env[68569]: INFO nova.scheduler.client.report [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted allocations for instance 1689f1f3-53f2-4c02-a969-e4dae21f14b7 [ 1042.160380] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db784d3-b9ca-40f0-9edd-1c15ff13deab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.187287] env[68569]: DEBUG oslo_vmware.api [None 
req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.192488] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167557, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.271846] env[68569]: DEBUG nova.compute.manager [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-vif-plugged-5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1042.272202] env[68569]: DEBUG oslo_concurrency.lockutils [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] Acquiring lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.272439] env[68569]: DEBUG oslo_concurrency.lockutils [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.272627] env[68569]: DEBUG oslo_concurrency.lockutils [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.272813] env[68569]: DEBUG nova.compute.manager [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] No waiting events found dispatching network-vif-plugged-5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1042.273012] env[68569]: WARNING nova.compute.manager [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received unexpected event network-vif-plugged-5d01ae9c-544e-4483-8eaf-ac68ad7945ec for instance with vm_state building and task_state spawning. 
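
The records above show the compute manager's handling of external network-vif-plugged / network-changed notifications: each one takes the per-instance "-events" lock, looks for a waiter registered for that event, and logs a WARNING ("Received unexpected event ...") when nothing was waiting. The following is only a minimal, self-contained sketch of that dispatch pattern, using a plain threading lock and a toy registry with made-up names; it is not Nova's actual InstanceEvents implementation.

    import threading

    class InstanceEventRegistry:
        """Toy registry of per-instance events a caller may wait on.

        Illustrates the pattern visible in the log: take the per-instance
        events lock, pop a matching waiter if one exists, otherwise treat
        the event as unexpected.
        """

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # {instance_uuid: {event_name: threading.Event}}

        def prepare(self, instance_uuid, event_name):
            # A caller registers interest before starting the operation
            # that will eventually trigger the event.
            waiter = threading.Event()
            with self._lock:
                self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            # The external-event handler pops the waiter under the lock.
            with self._lock:
                return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def handle_external_event(registry, instance_uuid, event_name):
        waiter = registry.pop(instance_uuid, event_name)
        if waiter is None:
            # Mirrors the WARNING records: the event arrived but nothing
            # was waiting for it, so it is noted and dropped.
            print("unexpected event %s for instance %s" % (event_name, instance_uuid))
        else:
            waiter.set()

    if __name__ == "__main__":
        reg = InstanceEventRegistry()
        handle_external_event(reg, "4c122cff", "network-vif-plugged")  # unexpected
        w = reg.prepare("4c122cff", "network-vif-plugged")
        handle_external_event(reg, "4c122cff", "network-vif-plugged")
        print("dispatched:", w.is_set())  # True
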
[ 1042.273185] env[68569]: DEBUG nova.compute.manager [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-changed-5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1042.273343] env[68569]: DEBUG nova.compute.manager [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Refreshing instance network info cache due to event network-changed-5d01ae9c-544e-4483-8eaf-ac68ad7945ec. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1042.273515] env[68569]: DEBUG oslo_concurrency.lockutils [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] Acquiring lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.420139] env[68569]: DEBUG oslo_concurrency.lockutils [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.420316] env[68569]: DEBUG oslo_concurrency.lockutils [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.420505] env[68569]: DEBUG nova.network.neutron [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.420708] env[68569]: DEBUG nova.objects.instance [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'info_cache' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.531578] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b87b55-f45c-e208-2840-0611bfb27464, 'name': SearchDatastore_Task, 'duration_secs': 0.096734} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.532105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.532372] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.532608] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.532779] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.532982] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.533624] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-407edd74-96fa-4b15-8e37-76e882397338 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.542588] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.542781] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.543598] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c693d4c0-bd19-429c-b0b2-8342a6c7fbdc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.548757] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1042.548757] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de7ebd-129e-d3fb-294b-a24ef61badb5" [ 1042.548757] env[68569]: _type = "Task" [ 1042.548757] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.561772] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de7ebd-129e-d3fb-294b-a24ef61badb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.585872] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167556, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.625484] env[68569]: DEBUG nova.network.neutron [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Successfully updated port: 5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.665130] env[68569]: DEBUG oslo_concurrency.lockutils [None req-865f9617-bcf0-4270-98d3-17464891ccbf tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "1689f1f3-53f2-4c02-a969-e4dae21f14b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.527s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.679665] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167554, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.134848} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.682230] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0/OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0.vmdk to [datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk. 
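
The MoveVirtualDisk_Task that completes above is driven by the same wait_for_task/_poll_task loop that produces the recurring "progress is N%" records throughout this log. Below is a rough, generic sketch of such a polling loop, assuming a caller-supplied poll() callable that reports (state, progress); it illustrates the pattern only and is not the oslo.vmware implementation.

    import time

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        """Poll a long-running task until it finishes.

        ``poll`` is any callable returning ``(state, progress)``, where the
        state mirrors the task lifecycle seen in the log ("running",
        "success", "error").
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress = poll()
            print("task progress is %s%%" % progress)  # cf. the _poll_task records
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            time.sleep(interval)

    if __name__ == "__main__":
        # Fake task that reaches 100% after a few polls.
        steps = iter([("running", 0), ("running", 52), ("running", 97), ("success", 100)])
        wait_for_task(lambda: next(steps), interval=0.01)
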
[ 1042.682430] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Cleaning up location [datastore2] OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1042.682596] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e2f80d3d-0621-4990-96d5-7b49dd0091d0 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.684035] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41c401f3-f143-427b-bf54-a232220de152 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.694749] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167557, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.695897] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1042.695897] env[68569]: value = "task-3167558" [ 1042.695897] env[68569]: _type = "Task" [ 1042.695897] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.708132] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167558, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.764986] env[68569]: DEBUG nova.network.neutron [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Updating instance_info_cache with network_info: [{"id": "2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f", "address": "fa:16:3e:96:e0:c5", "network": {"id": "b039ea38-6dcb-4ba8-a78e-803c883ed538", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-199978725", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc177dd-5f", "ovs_interfaceid": "2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ea020883-6378-448b-acd5-bd2f12b42b10", "address": "fa:16:3e:b5:1d:65", "network": {"id": "c8fc1e5a-5bb3-4ce1-91a4-9bf7c3113dcd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-607565493", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea020883-63", "ovs_interfaceid": "ea020883-6378-448b-acd5-bd2f12b42b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5d01ae9c-544e-4483-8eaf-ac68ad7945ec", "address": "fa:16:3e:17:8c:17", "network": {"id": "b039ea38-6dcb-4ba8-a78e-803c883ed538", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-199978725", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", 
"external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d01ae9c-54", "ovs_interfaceid": "5d01ae9c-544e-4483-8eaf-ac68ad7945ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.772895] env[68569]: DEBUG nova.network.neutron [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updated VIF entry in instance network info cache for port ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.772895] env[68569]: DEBUG nova.network.neutron [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11", "address": "fa:16:3e:2b:3f:98", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad2700ea-e5", "ovs_interfaceid": "ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1042.899450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc7c116-6c26-482f-8dc4-da635d6bbeef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.906588] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bb4f56-ba39-4927-bfa1-d1c52c14ed45 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.939318] env[68569]: DEBUG nova.objects.base [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Object Instance<92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948> lazy-loaded attributes: flavor,info_cache {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1042.941645] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9980a1-ac48-4154-8ba1-627c0527d464 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.950157] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c236e31-8c53-46c9-bfcb-3139a82990ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.964830] env[68569]: DEBUG nova.compute.provider_tree [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.058985] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52de7ebd-129e-d3fb-294b-a24ef61badb5, 'name': SearchDatastore_Task, 'duration_secs': 0.013392} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.060106] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6775cba7-5261-45e4-8962-4c9a1f1ada94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.065174] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1043.065174] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5251785d-b612-4fd6-e82f-0bdfe877bdcd" [ 1043.065174] env[68569]: _type = "Task" [ 1043.065174] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.073985] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5251785d-b612-4fd6-e82f-0bdfe877bdcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.081733] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167556, 'name': CreateSnapshot_Task, 'duration_secs': 1.972933} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.082216] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1043.083042] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7161cd1-7404-40c6-93f1-c63f3dd7153b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.130415] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "refresh_cache-16112ff1-bda8-4a20-b69c-b847ade376b4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.130415] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "refresh_cache-16112ff1-bda8-4a20-b69c-b847ade376b4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.130415] env[68569]: DEBUG nova.network.neutron [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.194937] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167557, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.207818] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200575} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.207818] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.207818] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.207818] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk to [datastore2] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.207818] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-887334ae-25df-4bcd-80f2-9675c14578e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.215019] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1043.215019] env[68569]: value = "task-3167559" [ 1043.215019] env[68569]: _type = "Task" [ 1043.215019] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.219753] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.267708] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Releasing lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.268380] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance network_info: |[{"id": "2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f", "address": "fa:16:3e:96:e0:c5", "network": {"id": "b039ea38-6dcb-4ba8-a78e-803c883ed538", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-199978725", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc177dd-5f", "ovs_interfaceid": "2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ea020883-6378-448b-acd5-bd2f12b42b10", "address": "fa:16:3e:b5:1d:65", "network": {"id": "c8fc1e5a-5bb3-4ce1-91a4-9bf7c3113dcd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-607565493", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea020883-63", "ovs_interfaceid": "ea020883-6378-448b-acd5-bd2f12b42b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5d01ae9c-544e-4483-8eaf-ac68ad7945ec", "address": "fa:16:3e:17:8c:17", "network": {"id": "b039ea38-6dcb-4ba8-a78e-803c883ed538", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-199978725", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d01ae9c-54", "ovs_interfaceid": "5d01ae9c-544e-4483-8eaf-ac68ad7945ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1043.268989] env[68569]: DEBUG oslo_concurrency.lockutils [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] Acquired lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.269326] env[68569]: DEBUG nova.network.neutron [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Refreshing network info cache for port 5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.270825] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:e0:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:1d:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea020883-6378-448b-acd5-bd2f12b42b10', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:8c:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d01ae9c-544e-4483-8eaf-ac68ad7945ec', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.283460] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Creating folder: Project (83427ebdc6364f7887409f7bfc35f025). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.284829] env[68569]: DEBUG oslo_concurrency.lockutils [req-695308b2-b0b1-40de-ace9-0f5c66b15a56 req-33c76f70-9e89-4432-a779-0cf4b663a35e service nova] Releasing lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.285221] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cff84e94-bd91-451f-8ccd-bd184864f06e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.295448] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Created folder: Project (83427ebdc6364f7887409f7bfc35f025) in parent group-v633430. [ 1043.295632] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Creating folder: Instances. Parent ref: group-v633690. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.295850] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3417b288-b062-4d21-b76e-4aa659fee540 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.304901] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Created folder: Instances in parent group-v633690. [ 1043.305142] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1043.305323] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.305518] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20444817-0f3c-4170-b030-17fffe58375b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.331394] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.331394] env[68569]: value = "task-3167562" [ 1043.331394] env[68569]: _type = "Task" [ 1043.331394] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.340426] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167562, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.467940] env[68569]: DEBUG nova.scheduler.client.report [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.576302] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5251785d-b612-4fd6-e82f-0bdfe877bdcd, 'name': SearchDatastore_Task, 'duration_secs': 0.046845} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.576566] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.577191] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.579267] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-902d9fde-7b29-4792-a088-684732ef75ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.586308] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1043.586308] env[68569]: value = "task-3167563" [ 1043.586308] env[68569]: _type = "Task" [ 1043.586308] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.599967] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1043.600295] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167563, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.602664] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c1104aac-4d9f-40a8-92e5-44a6ddc768a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.609676] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1043.609676] env[68569]: value = "task-3167564" [ 1043.609676] env[68569]: _type = "Task" [ 1043.609676] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.619295] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.695294] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167557, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.696291] env[68569]: DEBUG nova.network.neutron [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.722342] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.778602] env[68569]: DEBUG nova.network.neutron [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [{"id": "feea2419-1c39-4028-811f-c72311dae7a4", "address": "fa:16:3e:16:ea:60", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeea2419-1c", "ovs_interfaceid": "feea2419-1c39-4028-811f-c72311dae7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.849533] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167562, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.861166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.861437] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.975695] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.849s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.980227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.834s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.980227] env[68569]: DEBUG nova.objects.instance [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lazy-loading 'resources' on Instance uuid 281d7077-391a-4cce-9d31-af41568a2b7c {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.995803] env[68569]: DEBUG nova.compute.manager [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Received event network-vif-plugged-5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1043.996282] env[68569]: DEBUG oslo_concurrency.lockutils [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] Acquiring lock "16112ff1-bda8-4a20-b69c-b847ade376b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.996674] env[68569]: DEBUG oslo_concurrency.lockutils [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.997683] env[68569]: DEBUG oslo_concurrency.lockutils [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] Lock 
"16112ff1-bda8-4a20-b69c-b847ade376b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.997683] env[68569]: DEBUG nova.compute.manager [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] No waiting events found dispatching network-vif-plugged-5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.997683] env[68569]: WARNING nova.compute.manager [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Received unexpected event network-vif-plugged-5eb797c4-637c-475d-b402-04df004a7182 for instance with vm_state building and task_state spawning. [ 1043.997886] env[68569]: DEBUG nova.compute.manager [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Received event network-changed-5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1044.003395] env[68569]: DEBUG nova.compute.manager [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Refreshing instance network info cache due to event network-changed-5eb797c4-637c-475d-b402-04df004a7182. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1044.004281] env[68569]: DEBUG oslo_concurrency.lockutils [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] Acquiring lock "refresh_cache-16112ff1-bda8-4a20-b69c-b847ade376b4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.013264] env[68569]: INFO nova.scheduler.client.report [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted allocations for instance 1670f03a-94e2-4005-be7e-41aad61a8925 [ 1044.015297] env[68569]: DEBUG nova.network.neutron [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Updating instance_info_cache with network_info: [{"id": "5eb797c4-637c-475d-b402-04df004a7182", "address": "fa:16:3e:3b:ba:ed", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap5eb797c4-63", "ovs_interfaceid": "5eb797c4-637c-475d-b402-04df004a7182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.098118] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167563, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.131495] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.196193] env[68569]: DEBUG oslo_vmware.api [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167557, 'name': ReconfigVM_Task, 'duration_secs': 2.464989} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.196803] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.197147] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Reconfigured VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1044.223231] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.282281] env[68569]: DEBUG oslo_concurrency.lockutils [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.353795] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167562, 'name': CreateVM_Task, 'duration_secs': 0.869765} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.354288] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.355201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.355498] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.355761] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1044.356113] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-476ecc38-48ce-4521-be57-beab1222e301 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.362152] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1044.362152] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528ea0b1-3146-d699-59fd-c127b3af807f" [ 1044.362152] env[68569]: _type = "Task" [ 1044.362152] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.366484] env[68569]: INFO nova.compute.manager [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Detaching volume 3c545470-bd6a-4b3f-859b-0dbc00c8fe24 [ 1044.374305] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528ea0b1-3146-d699-59fd-c127b3af807f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.412440] env[68569]: INFO nova.virt.block_device [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Attempting to driver detach volume 3c545470-bd6a-4b3f-859b-0dbc00c8fe24 from mountpoint /dev/sdb [ 1044.412745] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1044.413044] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633659', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'name': 'volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fd803a5e-8dbd-449e-b45d-1e6410a286e8', 'attached_at': '', 'detached_at': '', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'serial': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1044.413982] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1dd0340-734c-41f1-a017-4530209ec36b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.439396] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33966f95-8709-42d7-8138-e1470d5b5bd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.447038] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068d71c9-413f-439b-bb47-6bb41f87dbe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.471203] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ee6391-66c3-4c54-899a-22165d885614 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.492470] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] The volume has not been displaced from its original location: [datastore1] volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24/volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1044.498220] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Reconfiguring VM instance instance-00000030 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1044.499418] env[68569]: DEBUG nova.network.neutron [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Updated VIF entry in instance network info cache for port 5d01ae9c-544e-4483-8eaf-ac68ad7945ec. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.499973] env[68569]: DEBUG nova.network.neutron [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Updating instance_info_cache with network_info: [{"id": "2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f", "address": "fa:16:3e:96:e0:c5", "network": {"id": "b039ea38-6dcb-4ba8-a78e-803c883ed538", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-199978725", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc177dd-5f", "ovs_interfaceid": "2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ea020883-6378-448b-acd5-bd2f12b42b10", "address": "fa:16:3e:b5:1d:65", "network": {"id": "c8fc1e5a-5bb3-4ce1-91a4-9bf7c3113dcd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-607565493", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea020883-63", "ovs_interfaceid": "ea020883-6378-448b-acd5-bd2f12b42b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5d01ae9c-544e-4483-8eaf-ac68ad7945ec", "address": 
"fa:16:3e:17:8c:17", "network": {"id": "b039ea38-6dcb-4ba8-a78e-803c883ed538", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-199978725", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d01ae9c-54", "ovs_interfaceid": "5d01ae9c-544e-4483-8eaf-ac68ad7945ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.501582] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36cab355-427a-47d8-8bcf-d4ec55810fd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.525025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "refresh_cache-16112ff1-bda8-4a20-b69c-b847ade376b4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.525025] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Instance network_info: |[{"id": "5eb797c4-637c-475d-b402-04df004a7182", "address": "fa:16:3e:3b:ba:ed", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5eb797c4-63", "ovs_interfaceid": "5eb797c4-637c-475d-b402-04df004a7182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1044.527965] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ac09798a-13a6-4529-b724-040ca26e9966 tempest-DeleteServersTestJSON-1181782645 
tempest-DeleteServersTestJSON-1181782645-project-member] Lock "1670f03a-94e2-4005-be7e-41aad61a8925" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.236s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.529016] env[68569]: DEBUG oslo_concurrency.lockutils [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] Acquired lock "refresh_cache-16112ff1-bda8-4a20-b69c-b847ade376b4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.529206] env[68569]: DEBUG nova.network.neutron [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Refreshing network info cache for port 5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.530326] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:ba:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5eb797c4-637c-475d-b402-04df004a7182', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.539337] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.539706] env[68569]: DEBUG oslo_vmware.api [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1044.539706] env[68569]: value = "task-3167565" [ 1044.539706] env[68569]: _type = "Task" [ 1044.539706] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.544831] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.544996] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36c536d2-f7f5-4cef-9a63-f92906c8a1ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.577284] env[68569]: DEBUG oslo_vmware.api [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167565, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.577654] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.577654] env[68569]: value = "task-3167566" [ 1044.577654] env[68569]: _type = "Task" [ 1044.577654] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.591551] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167566, 'name': CreateVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.606272] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167563, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.622777] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.702963] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d5ccb55-423d-4873-b18b-4895cb786260 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6824efd5-427b-420d-83d5-a1d5acd94bf9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.182s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.724457] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.799956] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b453de-6a09-466f-820e-bc5ded88c065 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.809020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42484a0-24f3-49ec-aeac-ede9bd32020f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.846768] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3d4388-c492-4bde-a8f5-61599bd3b6e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.855412] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd0ccba-53be-4c60-af9f-96aaff8a5618 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.870857] env[68569]: DEBUG nova.compute.provider_tree [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.881294] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]528ea0b1-3146-d699-59fd-c127b3af807f, 'name': SearchDatastore_Task, 'duration_secs': 0.02111} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.882310] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.882550] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.882797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.882944] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.883142] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.883678] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-582d8343-82d0-4772-ac73-763cbb63b1b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.895119] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.895341] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.896141] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96e7214f-68e2-457a-8f6b-579357f25c52 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.902293] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1044.902293] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a49c0b-e8ce-77ed-d0f0-f1e19d68bbcb" [ 1044.902293] env[68569]: _type = "Task" [ 1044.902293] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.911230] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a49c0b-e8ce-77ed-d0f0-f1e19d68bbcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.018030] env[68569]: DEBUG oslo_concurrency.lockutils [req-4b1a455a-551c-4e5e-834b-d9f530a13314 req-031b57e6-4dab-459d-af03-0f02a117aaf1 service nova] Releasing lock "refresh_cache-4c122cff-f64c-4e4f-9454-034c44ff246b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.054559] env[68569]: DEBUG oslo_vmware.api [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167565, 'name': ReconfigVM_Task, 'duration_secs': 0.426677} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.054856] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Reconfigured VM instance instance-00000030 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1045.060050] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aae24b9-553b-42a7-aa18-0d11cf417072 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.076184] env[68569]: DEBUG oslo_vmware.api [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1045.076184] env[68569]: value = "task-3167567" [ 1045.076184] env[68569]: _type = "Task" [ 1045.076184] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.089964] env[68569]: DEBUG oslo_vmware.api [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167567, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.093537] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167566, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.103924] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167563, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.124359] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.229119] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.291087] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.291493] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba65ad65-d34b-4300-bb22-39c4cb60ac69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.301411] env[68569]: DEBUG oslo_vmware.api [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1045.301411] env[68569]: value = "task-3167568" [ 1045.301411] env[68569]: _type = "Task" [ 1045.301411] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.312644] env[68569]: DEBUG oslo_vmware.api [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167568, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.353246] env[68569]: DEBUG nova.network.neutron [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Updated VIF entry in instance network info cache for port 5eb797c4-637c-475d-b402-04df004a7182. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1045.353640] env[68569]: DEBUG nova.network.neutron [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Updating instance_info_cache with network_info: [{"id": "5eb797c4-637c-475d-b402-04df004a7182", "address": "fa:16:3e:3b:ba:ed", "network": {"id": "631b5a9a-a285-4b10-8df4-f25e41e7ffb9", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1572872308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc82d358e214a959ae6b34c33344b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5eb797c4-63", "ovs_interfaceid": "5eb797c4-637c-475d-b402-04df004a7182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.377161] env[68569]: DEBUG nova.scheduler.client.report [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.417036] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a49c0b-e8ce-77ed-d0f0-f1e19d68bbcb, 'name': SearchDatastore_Task, 'duration_secs': 0.016719} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.417230] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80e64134-3671-4775-852c-7f739c23c242 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.425512] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1045.425512] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e89885-a006-3511-93a0-2f4407147a8b" [ 1045.425512] env[68569]: _type = "Task" [ 1045.425512] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.436431] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e89885-a006-3511-93a0-2f4407147a8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.591324] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167566, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.594604] env[68569]: DEBUG oslo_vmware.api [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167567, 'name': ReconfigVM_Task, 'duration_secs': 0.218441} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.598020] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633659', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'name': 'volume-3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fd803a5e-8dbd-449e-b45d-1e6410a286e8', 'attached_at': '', 'detached_at': '', 'volume_id': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24', 'serial': '3c545470-bd6a-4b3f-859b-0dbc00c8fe24'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1045.607029] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167563, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.624651] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.726942] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.815927] env[68569]: DEBUG oslo_vmware.api [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167568, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.838691] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "d7a0631c-902d-4653-b900-2123de5bcb44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.838691] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "d7a0631c-902d-4653-b900-2123de5bcb44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.857130] env[68569]: DEBUG oslo_concurrency.lockutils [req-70261735-5625-40bf-a45b-31059b8caa0b req-4bc01657-9aa8-4786-90d5-2cd787350d29 service nova] Releasing lock "refresh_cache-16112ff1-bda8-4a20-b69c-b847ade376b4" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.883179] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.904s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.894820] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.112s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.896713] env[68569]: INFO nova.compute.claims [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.919020] env[68569]: INFO nova.scheduler.client.report [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted allocations for instance 281d7077-391a-4cce-9d31-af41568a2b7c [ 1045.939716] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e89885-a006-3511-93a0-2f4407147a8b, 'name': SearchDatastore_Task, 'duration_secs': 0.029511} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.939985] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.940324] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 4c122cff-f64c-4e4f-9454-034c44ff246b/4c122cff-f64c-4e4f-9454-034c44ff246b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.940603] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a8e8664-ce9b-421a-93d8-26d33b924581 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.951359] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1045.951359] env[68569]: value = "task-3167569" [ 1045.951359] env[68569]: _type = "Task" [ 1045.951359] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.963209] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.093256] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167566, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.106154] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167563, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.348825} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.106466] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.106698] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.106976] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20c8051a-5f33-4caa-b493-22015bd492da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.118813] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1046.118813] env[68569]: value = "task-3167570" [ 1046.118813] env[68569]: _type = "Task" [ 1046.118813] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.126999] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.133567] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.161043] env[68569]: DEBUG nova.objects.instance [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'flavor' on Instance uuid fd803a5e-8dbd-449e-b45d-1e6410a286e8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.224396] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.314354] env[68569]: DEBUG oslo_vmware.api [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167568, 'name': PowerOnVM_Task, 'duration_secs': 0.884687} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.314866] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.314918] env[68569]: DEBUG nova.compute.manager [None req-858a181f-7642-4487-b726-458bf5730723 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1046.315719] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a049f369-7d56-4c8a-abf9-47f31e6e5b52 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.342372] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1046.425086] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a4b79a81-1985-4581-a825-7983af9df78a tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "281d7077-391a-4cce-9d31-af41568a2b7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.760s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.462344] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167569, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.492978] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-6824efd5-427b-420d-83d5-a1d5acd94bf9-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.492978] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6824efd5-427b-420d-83d5-a1d5acd94bf9-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.593318] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167566, 'name': CreateVM_Task, 'duration_secs': 1.956542} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.593574] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.594364] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.594536] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.594902] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1046.595273] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c86fa9c2-8d21-4033-b6e6-7e01731fe8f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.600547] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1046.600547] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c2cf2e-0865-5743-95db-0a4b83a9e6ca" [ 1046.600547] env[68569]: _type = "Task" [ 1046.600547] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.609048] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c2cf2e-0865-5743-95db-0a4b83a9e6ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.626556] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.633233] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213964} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.633509] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.634764] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155168ea-c94c-4029-9b36-1d003f28e605 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.660709] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.660709] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53ba95a8-bf9c-480f-ad68-0362ac04cf32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.683839] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1046.683839] env[68569]: value = "task-3167571" [ 1046.683839] env[68569]: _type = "Task" [ 1046.683839] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.696381] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167571, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.728706] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167559, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.052541} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.729113] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06/f1fc5b9e-4d0a-4b68-b39b-3d0785190c06.vmdk to [datastore2] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.730192] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcba933-6c2a-4fcf-a6fa-f7b009ca601a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.757172] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.757972] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba05a495-7b2d-46a1-86e3-bf995cbea567 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.779963] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1046.779963] env[68569]: value = "task-3167572" [ 1046.779963] env[68569]: _type = "Task" [ 1046.779963] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.790806] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167572, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.867467] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.961948] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.916439} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.964759] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 4c122cff-f64c-4e4f-9454-034c44ff246b/4c122cff-f64c-4e4f-9454-034c44ff246b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.964887] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.965482] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-443e3032-69b4-47b9-a823-ba3efb5aa0f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.973092] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1046.973092] env[68569]: value = "task-3167573" [ 1046.973092] env[68569]: _type = "Task" [ 1046.973092] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.983372] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167573, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.995749] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.995947] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.996836] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b679ed56-7bd8-43aa-abf5-4b6934758eb0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.018662] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0681b64c-3adc-4460-ac7c-a1f918be835a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.045871] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Reconfiguring VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1047.048865] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6546f59a-d5a7-42b4-a847-64f78c34df36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.067009] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1047.067009] env[68569]: value = "task-3167574" [ 1047.067009] env[68569]: _type = "Task" [ 1047.067009] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.074896] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.111097] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c2cf2e-0865-5743-95db-0a4b83a9e6ca, 'name': SearchDatastore_Task, 'duration_secs': 0.023203} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.111403] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.111647] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.111885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.112031] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.112340] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.114961] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14ab3d7f-15a9-4d98-bf4c-45cfd17148e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.128119] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.132056] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.132241] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.132954] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80280707-8a49-4e72-bb58-0f8baf0bd24c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.137817] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1047.137817] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52717eb2-e6e9-c14a-99fb-8d0492bbaef7" [ 1047.137817] env[68569]: _type = "Task" [ 1047.137817] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.148445] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52717eb2-e6e9-c14a-99fb-8d0492bbaef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.179865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4e7723f8-5d0d-473a-a013-e307c580be2d tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.318s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.188875] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7069b3cb-9067-4f07-9949-47fb5512bea7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.197678] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167571, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.200212] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b5de9a-7992-43a3-862d-c2bd4e825fcc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.229996] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5301182-ffe8-4cb8-b4db-00a74cfbe22b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.237044] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51310d0f-c504-408c-a461-8e07f87ddc11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.250058] env[68569]: DEBUG nova.compute.provider_tree [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.288939] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.482888] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074965} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.483308] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.483918] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048e0fd1-f128-4936-9c1d-dcdc5cb33c6d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.513144] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 4c122cff-f64c-4e4f-9454-034c44ff246b/4c122cff-f64c-4e4f-9454-034c44ff246b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.513144] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfbe2e59-6164-45b7-b8bb-01f835205dcd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.532881] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1047.532881] env[68569]: value = "task-3167575" [ 1047.532881] env[68569]: _type = "Task" [ 1047.532881] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.540955] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167575, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.577240] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. 
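The recurring "wait_for_task" / "_poll_task" entries above are oslo.vmware's task-polling loop driving long-running vCenter operations (ReconfigVM_Task, Rename_Task, CloneVM_Task and so on) to completion. A minimal sketch of that call pattern follows; the vCenter host, credentials and the power-off call are placeholder assumptions for illustration, not values taken from this deployment (the moref value "vm-633693" is merely reused from the log).

    # Illustrative sketch of the oslo.vmware pattern behind the
    # "Invoking VirtualMachine.<X>_Task" and "Task: {...} progress is N%" lines.
    # Host, credentials and the chosen operation are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test',               # vCenter host (placeholder)
        'administrator@vsphere.local',   # username (placeholder)
        'secret',                        # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)          # the poll interval drives the progress lines

    # Build a managed-object reference and start an asynchronous vCenter task.
    vm_ref = vim_util.get_moref('vm-633693', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls the Task object until it reports success
    # (logging progress along the way) or raises if the task errors out.
    session.wait_for_task(task)

Every "completed successfully" entry in this log corresponds to one such wait_for_task() call returning.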
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.582835] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.583123] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.583345] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.583533] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.583701] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.585823] env[68569]: INFO nova.compute.manager [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Terminating instance [ 1047.627319] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task} progress is 95%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.648440] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52717eb2-e6e9-c14a-99fb-8d0492bbaef7, 'name': SearchDatastore_Task, 'duration_secs': 0.020211} completed successfully. 
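The "Acquiring lock" / "acquired" / "released" trios above come from oslo.concurrency's lockutils, which nova uses to serialise work on a given instance, image-cache entry or the resource tracker. A rough sketch of the two usual forms; the lock names are copied from this log purely for illustration and the function bodies are placeholders.

    from oslo_concurrency import lockutils

    # Context-manager form, as used for the per-image cache entry seen above.
    image_cache_lock = ('[datastore2] devstack-image-cache_base/'
                        'cfcf6154-fe87-45d3-9aaf-2d3604c95629/'
                        'cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk')
    with lockutils.lock(image_cache_lock):
        # Only one worker in this process touches the cached VMDK at a time.
        pass

    # Decorator form, comparable to the "compute_resources" lock held around
    # the resource tracker's instance_claim in the entries above.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass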
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.649279] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6312c46-5cd4-404c-b467-6fdb8927b106 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.654736] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1047.654736] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526b178f-3be0-db25-6ce8-d8d4a9ac9ae1" [ 1047.654736] env[68569]: _type = "Task" [ 1047.654736] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.662608] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526b178f-3be0-db25-6ce8-d8d4a9ac9ae1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.693741] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167571, 'name': ReconfigVM_Task, 'duration_secs': 0.95185} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.694065] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc/4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.694676] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d66fce58-57bb-433e-831e-11e42cab2629 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.700404] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1047.700404] env[68569]: value = "task-3167576" [ 1047.700404] env[68569]: _type = "Task" [ 1047.700404] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.707850] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167576, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.752988] env[68569]: DEBUG nova.scheduler.client.report [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.789403] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167572, 'name': ReconfigVM_Task, 'duration_secs': 0.98683} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.789599] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Reconfigured VM instance instance-0000004b to attach disk [datastore2] c0ea0ef8-93c2-416a-8caa-a51f7a39627e/c0ea0ef8-93c2-416a-8caa-a51f7a39627e.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.790440] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6b8736e-9369-4a2d-9879-42f15201439f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.796956] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1047.796956] env[68569]: value = "task-3167577" [ 1047.796956] env[68569]: _type = "Task" [ 1047.796956] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.805835] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167577, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.043158] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167575, 'name': ReconfigVM_Task, 'duration_secs': 0.459229} completed successfully. 
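The inventory dictionary reported to placement above fixes the schedulable capacity of this node. As a quick worked check, placement treats capacity as (total - reserved) * allocation_ratio, so the logged figures work out as below; the snippet just restates that arithmetic for illustration.

    # Capacity implied by the inventory dict logged above:
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0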
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.043381] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 4c122cff-f64c-4e4f-9454-034c44ff246b/4c122cff-f64c-4e4f-9454-034c44ff246b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.044062] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65f465a3-3007-44c0-bc5d-1461e49a103d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.050131] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1048.050131] env[68569]: value = "task-3167578" [ 1048.050131] env[68569]: _type = "Task" [ 1048.050131] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.058480] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167578, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.076616] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.089379] env[68569]: DEBUG nova.compute.manager [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.089650] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.090462] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ef6eba-a204-4317-b167-12658b2e14b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.098933] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.099182] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17e4eeef-a717-48f6-9bda-248c98f50969 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.104931] env[68569]: DEBUG oslo_vmware.api [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1048.104931] env[68569]: value = "task-3167579" [ 1048.104931] env[68569]: _type = "Task" [ 1048.104931] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.112462] env[68569]: DEBUG oslo_vmware.api [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167579, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.125857] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167564, 'name': CloneVM_Task, 'duration_secs': 4.3113} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.126140] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Created linked-clone VM from snapshot [ 1048.126851] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f1e56b-4a07-4dcf-908f-2dc8cd6180f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.134949] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Uploading image 1146c72a-4d3b-4831-9e73-15f957a6f7be {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1048.156787] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1048.156787] env[68569]: value = "vm-633693" [ 1048.156787] env[68569]: _type = "VirtualMachine" [ 1048.156787] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1048.157137] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7ae20bfb-7fa7-4b35-ab9a-0a51fcbee6e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.168815] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526b178f-3be0-db25-6ce8-d8d4a9ac9ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.027497} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.169854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.170215] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 16112ff1-bda8-4a20-b69c-b847ade376b4/16112ff1-bda8-4a20-b69c-b847ade376b4.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1048.170610] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease: (returnval){ [ 1048.170610] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43108-b63d-c5c7-5b8e-a3cd4fae6297" [ 1048.170610] env[68569]: _type = "HttpNfcLease" [ 1048.170610] env[68569]: } obtained for exporting VM: (result){ [ 1048.170610] env[68569]: value = "vm-633693" [ 1048.170610] env[68569]: _type = "VirtualMachine" [ 1048.170610] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1048.170992] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the lease: (returnval){ [ 1048.170992] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43108-b63d-c5c7-5b8e-a3cd4fae6297" [ 1048.170992] env[68569]: _type = "HttpNfcLease" [ 1048.170992] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1048.171207] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c599eef-3783-4f1d-8a62-2d51421a4c15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.179359] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1048.179359] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43108-b63d-c5c7-5b8e-a3cd4fae6297" [ 1048.179359] env[68569]: _type = "HttpNfcLease" [ 1048.179359] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1048.180685] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1048.180685] env[68569]: value = "task-3167581" [ 1048.180685] env[68569]: _type = "Task" [ 1048.180685] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.188114] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167581, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.209410] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167576, 'name': Rename_Task, 'duration_secs': 0.222931} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.209678] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.209913] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38374e5c-647e-4df9-b94f-503a4d496eef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.215186] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1048.215186] env[68569]: value = "task-3167582" [ 1048.215186] env[68569]: _type = "Task" [ 1048.215186] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.222976] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167582, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.260361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.260932] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1048.263832] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.396s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.265293] env[68569]: INFO nova.compute.claims [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1048.307999] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167577, 'name': Rename_Task, 'duration_secs': 0.179901} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.308359] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.308620] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-135a5056-e615-4f06-8e1b-a4f743680121 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.315651] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1048.315651] env[68569]: value = "task-3167583" [ 1048.315651] env[68569]: _type = "Task" [ 1048.315651] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.324520] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167583, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.562035] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167578, 'name': Rename_Task, 'duration_secs': 0.151102} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.562390] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.562390] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-422d8a48-5125-45e3-b937-6e7ae2f62ecd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.570133] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1048.570133] env[68569]: value = "task-3167584" [ 1048.570133] env[68569]: _type = "Task" [ 1048.570133] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.585356] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167584, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.585678] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.615125] env[68569]: DEBUG oslo_vmware.api [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167579, 'name': PowerOffVM_Task, 'duration_secs': 0.284566} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.615421] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.615589] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.615846] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-474477b0-c830-4268-8096-09afb3249d9b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.680099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.680470] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.680683] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleting the datastore file [datastore2] fd803a5e-8dbd-449e-b45d-1e6410a286e8 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.681558] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dc66a44-8c05-49f6-b30d-4f2827f5844e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.690280] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1048.690280] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43108-b63d-c5c7-5b8e-a3cd4fae6297" [ 1048.690280] env[68569]: _type = "HttpNfcLease" [ 1048.690280] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1048.690976] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1048.690976] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e43108-b63d-c5c7-5b8e-a3cd4fae6297" [ 1048.690976] env[68569]: _type = "HttpNfcLease" [ 1048.690976] env[68569]: }. 
{{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1048.692206] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d0856c-a489-43a4-840b-0ac0097a44ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.700179] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167581, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465614} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.700587] env[68569]: DEBUG oslo_vmware.api [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1048.700587] env[68569]: value = "task-3167586" [ 1048.700587] env[68569]: _type = "Task" [ 1048.700587] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.701389] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 16112ff1-bda8-4a20-b69c-b847ade376b4/16112ff1-bda8-4a20-b69c-b847ade376b4.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.701809] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.702166] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfeae546-c792-483d-8784-81ee8e7a1794 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.711982] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfde3a-5659-c1c5-aae1-d7bad0e4405f/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1048.712227] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfde3a-5659-c1c5-aae1-d7bad0e4405f/disk-0.vmdk for reading. 
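The HttpNfcLease sequence above (ExportVm, lease becoming ready, "Found VMDK URL", "Opening URL ... for reading") is how the linked-clone VM is streamed out so its stream-optimized disk can be uploaded to Glance. A very rough sketch of the read side using oslo.vmware's rw_handles follows; the VmdkReadHandle argument order is my best reading of that module and should be checked against the installed version, the host and size are placeholders, and `session` / `vm_ref` are assumed to exist as in the earlier task-polling sketch.

    # Rough sketch only: stream the exported VM's VMDK via the HttpNfcLease
    # that the log shows being created and polled above.
    # 'session' is a VMwareAPISession and 'vm_ref' the exported VM's moref.
    from oslo_vmware import rw_handles

    read_handle = rw_handles.VmdkReadHandle(
        session,
        'esx-host.example.test',   # host serving the NFC URL (placeholder)
        443,                       # port (placeholder)
        vm_ref,                    # moref of the exported VirtualMachine
        None,                      # no explicit VMDK path: take it from the lease
        21318656)                  # approximate size in bytes (placeholder)

    try:
        while True:
            chunk = read_handle.read(64 * 1024)
            if not chunk:
                break
            # ...write chunk to the Glance image...
    finally:
        read_handle.close()        # closing also releases the lease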
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1048.793349] env[68569]: DEBUG nova.compute.utils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1048.807010] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1048.807355] env[68569]: DEBUG nova.network.neutron [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1048.810455] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1048.810455] env[68569]: value = "task-3167587" [ 1048.810455] env[68569]: _type = "Task" [ 1048.810455] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.810818] env[68569]: DEBUG oslo_vmware.api [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167586, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.834325] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167582, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.842090] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060927} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.854025] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.854025] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167583, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.854025] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bc1e87-1192-4f4f-96a1-1c018f034549 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.862511] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3a24e894-645f-4557-89f1-ffaac3a25de0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.895361] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 16112ff1-bda8-4a20-b69c-b847ade376b4/16112ff1-bda8-4a20-b69c-b847ade376b4.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.896903] env[68569]: DEBUG nova.policy [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1048.902824] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4b97ce1-be75-49be-8f87-b357829e94be {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.926327] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1048.926327] env[68569]: value = "task-3167588" [ 1048.926327] env[68569]: _type = "Task" [ 1048.926327] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.942101] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167588, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.080080] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.089158] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167584, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.145054] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301101f3-5637-4bec-9a8f-33e34db51a5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.157650] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72f1564-b04a-42a1-be37-8391f4414d38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.195401] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfda3ce-1626-40f6-b67e-f9059fe4f115 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.209346] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7c7439-3a8f-475c-9dd6-503bb0e8f5e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.228191] env[68569]: DEBUG nova.compute.provider_tree [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.229900] env[68569]: DEBUG oslo_vmware.api [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155413} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.235518] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.235518] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.235518] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.235518] env[68569]: INFO nova.compute.manager [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1049.235518] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.235518] env[68569]: DEBUG nova.compute.manager [-] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1049.235518] env[68569]: DEBUG nova.network.neutron [-] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.241880] env[68569]: DEBUG oslo_vmware.api [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167582, 'name': PowerOnVM_Task, 'duration_secs': 0.68648} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.243073] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.243304] env[68569]: DEBUG nova.compute.manager [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.244182] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35442e40-d149-4cc8-9ce3-11523edfd1c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.301475] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1049.328523] env[68569]: DEBUG oslo_vmware.api [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167583, 'name': PowerOnVM_Task, 'duration_secs': 0.856059} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.328890] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.374358] env[68569]: DEBUG nova.network.neutron [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Successfully created port: b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1049.437556] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167588, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.438939] env[68569]: DEBUG nova.compute.manager [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.439970] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c411d768-3c02-47ff-9e78-518ffedf7f8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.586676] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.590172] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167584, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.736141] env[68569]: DEBUG nova.scheduler.client.report [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.763452] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.938381] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167588, 'name': ReconfigVM_Task, 'duration_secs': 0.758894} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.938798] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 16112ff1-bda8-4a20-b69c-b847ade376b4/16112ff1-bda8-4a20-b69c-b847ade376b4.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.939673] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f848f857-c06a-4410-8e7d-52c6734cd325 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.948255] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1049.948255] env[68569]: value = "task-3167589" [ 1049.948255] env[68569]: _type = "Task" [ 1049.948255] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.957409] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9e6c39ec-844f-4dcf-b60e-c469d93dc83b tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 37.042s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.962155] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167589, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.998653] env[68569]: DEBUG nova.compute.manager [req-27d625eb-63fd-4175-8d32-7595ba195e69 req-93c776fa-eec1-43c1-b8a4-74bb9420cd3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Received event network-vif-deleted-8c249c42-adbc-4fe1-994f-1f00f6dfd6d1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1050.001591] env[68569]: INFO nova.compute.manager [req-27d625eb-63fd-4175-8d32-7595ba195e69 req-93c776fa-eec1-43c1-b8a4-74bb9420cd3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Neutron deleted interface 8c249c42-adbc-4fe1-994f-1f00f6dfd6d1; detaching it from the instance and deleting it from the info cache [ 1050.001591] env[68569]: DEBUG nova.network.neutron [req-27d625eb-63fd-4175-8d32-7595ba195e69 req-93c776fa-eec1-43c1-b8a4-74bb9420cd3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.087049] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.090565] env[68569]: DEBUG oslo_vmware.api [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167584, 'name': PowerOnVM_Task, 'duration_secs': 1.456094} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.090865] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.091194] env[68569]: INFO nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Took 15.00 seconds to spawn the instance on the hypervisor. [ 1050.091435] env[68569]: DEBUG nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.092343] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3176325-0688-4021-9466-b0fe59cb4ab4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.242242] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.978s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.242836] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1050.246480] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.483s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.246821] env[68569]: DEBUG nova.objects.instance [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1050.310500] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1050.338942] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1050.338942] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1050.339246] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1050.339246] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1050.339343] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1050.340152] env[68569]: DEBUG 
nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1050.340152] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1050.340152] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1050.340152] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1050.340383] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1050.341008] env[68569]: DEBUG nova.virt.hardware [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1050.342097] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacb9576-07c5-499c-9227-624c33abf5ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.351124] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76b082a-431c-45e9-87a7-8bb2cd058eb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.456712] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167589, 'name': Rename_Task, 'duration_secs': 0.194928} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.457097] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.457364] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e5a9656-6db1-476a-96ac-820c047ce4a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.465010] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1050.465010] env[68569]: value = "task-3167590" [ 1050.465010] env[68569]: _type = "Task" [ 1050.465010] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.465010] env[68569]: DEBUG nova.network.neutron [-] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.477021] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167590, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.504612] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbc81739-5ca9-4994-abcd-d5523360be8f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.515355] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebeea40b-8a28-4e2a-9397-7175f686f926 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.551232] env[68569]: DEBUG nova.compute.manager [req-27d625eb-63fd-4175-8d32-7595ba195e69 req-93c776fa-eec1-43c1-b8a4-74bb9420cd3f service nova] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Detach interface failed, port_id=8c249c42-adbc-4fe1-994f-1f00f6dfd6d1, reason: Instance fd803a5e-8dbd-449e-b45d-1e6410a286e8 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1050.583531] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.611061] env[68569]: INFO nova.compute.manager [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Took 36.34 seconds to build instance. 
[ 1050.758897] env[68569]: DEBUG nova.compute.utils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1050.761611] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1050.761611] env[68569]: DEBUG nova.network.neutron [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1050.813997] env[68569]: DEBUG nova.policy [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1050.818208] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.818445] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.818682] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.818815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.818981] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.821125] env[68569]: INFO nova.compute.manager [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Terminating instance [ 1050.971405] env[68569]: INFO nova.compute.manager [-] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Took 1.74 seconds to deallocate network for instance. [ 1050.979984] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167590, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.088410] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.113310] env[68569]: DEBUG oslo_concurrency.lockutils [None req-da0b76be-e626-4a6a-b484-92cd3f9d9518 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.852s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.265508] env[68569]: DEBUG oslo_concurrency.lockutils [None req-692a23c4-5eb3-4531-a4d8-9055364a492d tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.266988] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1051.324389] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "refresh_cache-4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.324603] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "refresh_cache-4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.326158] env[68569]: DEBUG nova.network.neutron [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.343511] env[68569]: DEBUG nova.network.neutron [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Successfully updated port: b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1051.348787] env[68569]: DEBUG nova.network.neutron [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Successfully created port: e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1051.475845] env[68569]: DEBUG oslo_vmware.api [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167590, 'name': PowerOnVM_Task, 'duration_secs': 0.772454} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.476163] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1051.476542] env[68569]: INFO nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 1051.476630] env[68569]: DEBUG nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.477487] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59224384-e7cf-410d-8ba2-7b72082e9121 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.481052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.481369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.481632] env[68569]: DEBUG nova.objects.instance [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'resources' on Instance uuid fd803a5e-8dbd-449e-b45d-1e6410a286e8 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.584432] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.715875] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7713d243-65e3-49e2-a372-01f754e07673 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.722699] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Suspending the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1051.723013] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d7f6439e-653c-46db-8418-98b4defc0664 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.729510] env[68569]: DEBUG oslo_vmware.api [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1051.729510] env[68569]: value = "task-3167591" [ 1051.729510] env[68569]: _type = "Task" [ 1051.729510] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.737665] env[68569]: DEBUG oslo_vmware.api [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167591, 'name': SuspendVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.847254] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.847254] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.847254] env[68569]: DEBUG nova.network.neutron [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.849354] env[68569]: DEBUG nova.network.neutron [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1051.902307] env[68569]: DEBUG nova.network.neutron [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.955327] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "4c122cff-f64c-4e4f-9454-034c44ff246b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.955640] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.955890] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.956108] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.956288] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.958737] env[68569]: INFO nova.compute.manager [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Terminating instance [ 1052.004591] env[68569]: INFO nova.compute.manager [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Took 29.87 seconds to build instance. 
[ 1052.032588] env[68569]: DEBUG nova.compute.manager [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Received event network-vif-plugged-b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1052.032900] env[68569]: DEBUG oslo_concurrency.lockutils [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] Acquiring lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.033236] env[68569]: DEBUG oslo_concurrency.lockutils [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.033494] env[68569]: DEBUG oslo_concurrency.lockutils [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.033743] env[68569]: DEBUG nova.compute.manager [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] No waiting events found dispatching network-vif-plugged-b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1052.034674] env[68569]: WARNING nova.compute.manager [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Received unexpected event network-vif-plugged-b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 for instance with vm_state building and task_state spawning. [ 1052.034674] env[68569]: DEBUG nova.compute.manager [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Received event network-changed-b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1052.034674] env[68569]: DEBUG nova.compute.manager [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Refreshing instance network info cache due to event network-changed-b3f706dd-a754-40c2-ba12-9cfef3e9e2c1. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1052.034674] env[68569]: DEBUG oslo_concurrency.lockutils [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] Acquiring lock "refresh_cache-6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.084123] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.239372] env[68569]: DEBUG oslo_vmware.api [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167591, 'name': SuspendVM_Task} progress is 45%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.241691] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c67682c-c5e9-4063-92ed-83f587921bb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.249134] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363ee389-3376-4b79-a985-1881509e7a1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.279404] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1052.282888] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d27dd9-5150-435c-956a-dcfe4861eaff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.292685] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430a126f-1727-4f11-9f1a-08aa59f91bf9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.309364] env[68569]: DEBUG nova.compute.provider_tree [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.312852] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1052.313128] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.313288] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1052.313470] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.313629] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1052.313778] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1052.313985] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1052.314159] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1052.314325] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1052.314486] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1052.314656] env[68569]: DEBUG nova.virt.hardware [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1052.315490] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4fdfa9-f0ab-446a-bcb8-cd54809f2318 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.324532] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6991f6-1577-43ce-b5af-58b1ddbba5ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.387738] env[68569]: DEBUG nova.network.neutron [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1052.405265] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "refresh_cache-4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.405666] env[68569]: DEBUG nova.compute.manager [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1052.405860] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.406748] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766fe4ee-077d-4bb5-ad3c-3ee5709fdd89 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.414866] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.415464] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f2eda2e-11cb-4089-8e2e-ce7d1d7e6e0b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.423567] env[68569]: DEBUG oslo_vmware.api [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1052.423567] env[68569]: value = "task-3167592" [ 1052.423567] env[68569]: _type = "Task" [ 1052.423567] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.430937] env[68569]: DEBUG oslo_vmware.api [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.466019] env[68569]: DEBUG nova.compute.manager [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1052.466019] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.466019] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c68563-9ce8-41f5-8f05-0143d4c493d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.473528] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.474305] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36ef460d-30c1-4697-a9a0-c57b80578c27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.481908] env[68569]: DEBUG oslo_vmware.api [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1052.481908] env[68569]: value = "task-3167593" [ 1052.481908] env[68569]: _type = "Task" [ 1052.481908] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.492866] env[68569]: DEBUG oslo_vmware.api [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167593, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.507137] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d93716c3-990a-44bf-a7ce-a3c807124b9f tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.377s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.573751] env[68569]: DEBUG nova.network.neutron [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Updating instance_info_cache with network_info: [{"id": "b3f706dd-a754-40c2-ba12-9cfef3e9e2c1", "address": "fa:16:3e:8a:29:3b", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f706dd-a7", "ovs_interfaceid": "b3f706dd-a754-40c2-ba12-9cfef3e9e2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.585179] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.739928] env[68569]: DEBUG oslo_vmware.api [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167591, 'name': SuspendVM_Task, 'duration_secs': 0.976267} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.740320] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Suspended the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1052.740391] env[68569]: DEBUG nova.compute.manager [None req-f5869d9b-aad5-4cff-9a88-9d03a4c4f346 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.741289] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566f5ef9-cf5c-4abe-bc94-ccc1b705e3c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.820451] env[68569]: DEBUG nova.scheduler.client.report [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.935140] env[68569]: DEBUG oslo_vmware.api [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167592, 'name': PowerOffVM_Task, 'duration_secs': 0.268465} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.935412] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.935575] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.935823] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1520ae4f-b27d-463d-8465-a99adacf6980 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.961913] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.962137] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.962326] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleting the datastore file [datastore2] 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.962586] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1780c72c-9d80-4ceb-95a5-1902c5945930 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.969607] env[68569]: DEBUG oslo_vmware.api [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1052.969607] env[68569]: value = "task-3167595" [ 1052.969607] env[68569]: _type = "Task" [ 1052.969607] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.980862] env[68569]: DEBUG oslo_vmware.api [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.997410] env[68569]: DEBUG oslo_vmware.api [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167593, 'name': PowerOffVM_Task, 'duration_secs': 0.344772} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.997695] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.997862] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.998133] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a9e0c25-32a0-4ecb-a8d5-e8b2af984f72 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.057025] env[68569]: DEBUG nova.network.neutron [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Successfully updated port: e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1053.079967] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.080201] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Instance network_info: |[{"id": "b3f706dd-a754-40c2-ba12-9cfef3e9e2c1", "address": "fa:16:3e:8a:29:3b", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f706dd-a7", "ovs_interfaceid": "b3f706dd-a754-40c2-ba12-9cfef3e9e2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1053.080976] env[68569]: DEBUG oslo_concurrency.lockutils [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] Acquired lock 
"refresh_cache-6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.081208] env[68569]: DEBUG nova.network.neutron [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Refreshing network info cache for port b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1053.082517] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:29:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3f706dd-a754-40c2-ba12-9cfef3e9e2c1', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.090529] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.095040] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1053.095387] env[68569]: DEBUG oslo_vmware.api [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167574, 'name': ReconfigVM_Task, 'duration_secs': 5.805074} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.095617] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02dc466b-f3f3-4ac8-a824-f8c3980d10ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.111091] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.111353] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Reconfigured VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1053.119206] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.119206] env[68569]: value = "task-3167597" [ 1053.119206] env[68569]: _type = "Task" [ 1053.119206] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.129191] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167597, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.164995] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1053.164995] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1053.164995] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Deleting the datastore file [datastore1] 4c122cff-f64c-4e4f-9454-034c44ff246b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.165554] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a140fcf7-4718-4f03-85ad-6ef9bb07ea73 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.172111] env[68569]: DEBUG oslo_vmware.api [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1053.172111] env[68569]: value = "task-3167598" [ 1053.172111] env[68569]: _type = "Task" [ 1053.172111] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.183131] env[68569]: DEBUG oslo_vmware.api [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.326034] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.355157] env[68569]: INFO nova.scheduler.client.report [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted allocations for instance fd803a5e-8dbd-449e-b45d-1e6410a286e8 [ 1053.482193] env[68569]: DEBUG oslo_vmware.api [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24172} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.482519] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.482718] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.482908] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.483099] env[68569]: INFO nova.compute.manager [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1053.483383] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.483620] env[68569]: DEBUG nova.compute.manager [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1053.483971] env[68569]: DEBUG nova.network.neutron [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1053.502267] env[68569]: DEBUG nova.network.neutron [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1053.564714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.564714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.564714] env[68569]: DEBUG nova.network.neutron [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1053.629662] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167597, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.683325] env[68569]: DEBUG oslo_vmware.api [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397305} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.683611] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.683809] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.683976] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.684161] env[68569]: INFO nova.compute.manager [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1053.684402] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.684598] env[68569]: DEBUG nova.compute.manager [-] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1053.684691] env[68569]: DEBUG nova.network.neutron [-] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1053.868358] env[68569]: DEBUG oslo_concurrency.lockutils [None req-382b2c42-6b79-495e-ab68-14b11509b137 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "fd803a5e-8dbd-449e-b45d-1e6410a286e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.285s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.002473] env[68569]: DEBUG nova.network.neutron [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Updated VIF entry in instance network info cache for port b3f706dd-a754-40c2-ba12-9cfef3e9e2c1. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.002826] env[68569]: DEBUG nova.network.neutron [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Updating instance_info_cache with network_info: [{"id": "b3f706dd-a754-40c2-ba12-9cfef3e9e2c1", "address": "fa:16:3e:8a:29:3b", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f706dd-a7", "ovs_interfaceid": "b3f706dd-a754-40c2-ba12-9cfef3e9e2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.004530] env[68569]: DEBUG nova.network.neutron [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.050521] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "16112ff1-bda8-4a20-b69c-b847ade376b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.050861] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.051095] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "16112ff1-bda8-4a20-b69c-b847ade376b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.051277] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.051461] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.053734] env[68569]: INFO nova.compute.manager [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Terminating instance [ 1054.131586] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167597, 'name': CreateVM_Task, 'duration_secs': 0.532845} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.131883] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1054.132488] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.132654] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.132969] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1054.133862] env[68569]: DEBUG nova.network.neutron [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1054.135890] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f764367e-6a9a-4227-9e2a-37d63b5d43b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.141436] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1054.141436] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5244e326-5915-4fbf-135d-020bf9e249c4" [ 1054.141436] env[68569]: _type = "Task" [ 1054.141436] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.153418] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5244e326-5915-4fbf-135d-020bf9e249c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.205544] env[68569]: DEBUG nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Received event network-vif-plugged-e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1054.205690] env[68569]: DEBUG oslo_concurrency.lockutils [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] Acquiring lock "d7a0631c-902d-4653-b900-2123de5bcb44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.205897] env[68569]: DEBUG oslo_concurrency.lockutils [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] Lock "d7a0631c-902d-4653-b900-2123de5bcb44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.206157] env[68569]: DEBUG oslo_concurrency.lockutils [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] Lock "d7a0631c-902d-4653-b900-2123de5bcb44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.206403] env[68569]: DEBUG nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] No waiting events found dispatching network-vif-plugged-e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1054.206578] env[68569]: WARNING nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Received unexpected event network-vif-plugged-e9d20c97-bf70-4f30-8424-5db6b70e5677 for instance with vm_state building and task_state spawning. [ 1054.206753] env[68569]: DEBUG nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Received event network-changed-e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1054.206908] env[68569]: DEBUG nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Refreshing instance network info cache due to event network-changed-e9d20c97-bf70-4f30-8424-5db6b70e5677. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1054.207291] env[68569]: DEBUG oslo_concurrency.lockutils [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] Acquiring lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.370555] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.370891] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.371026] env[68569]: DEBUG nova.network.neutron [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1054.387879] env[68569]: INFO nova.compute.manager [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Resuming [ 1054.388534] env[68569]: DEBUG nova.objects.instance [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lazy-loading 'flavor' on Instance uuid c0ea0ef8-93c2-416a-8caa-a51f7a39627e {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.419054] env[68569]: DEBUG nova.network.neutron [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Updating instance_info_cache with network_info: [{"id": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "address": "fa:16:3e:69:64:5d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9d20c97-bf", "ovs_interfaceid": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.505882] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.505882] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.506211] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.506324] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.507074] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.508924] env[68569]: DEBUG oslo_concurrency.lockutils [req-e068051e-c71b-4e84-af27-79961b2ba47e req-3581fcc1-439d-41ab-8a67-458d07ca2fe5 service nova] Releasing lock "refresh_cache-6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.509490] env[68569]: INFO nova.compute.manager [-] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Took 1.03 seconds to deallocate network for instance. 
[ 1054.510112] env[68569]: INFO nova.compute.manager [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Terminating instance [ 1054.558026] env[68569]: DEBUG nova.compute.manager [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1054.558026] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.559396] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4704f2b6-b9f9-4ea1-96a9-c5e0653c6c7f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.570649] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.570932] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9710de2-5686-4aa7-82ee-e75a11f451f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.577797] env[68569]: DEBUG oslo_vmware.api [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1054.577797] env[68569]: value = "task-3167599" [ 1054.577797] env[68569]: _type = "Task" [ 1054.577797] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.586198] env[68569]: DEBUG oslo_vmware.api [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.654710] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5244e326-5915-4fbf-135d-020bf9e249c4, 'name': SearchDatastore_Task, 'duration_secs': 0.014747} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.655187] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.655309] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1054.655568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.655732] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.655918] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.656202] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee819a75-8fc8-4d31-86e6-02dd2ed61f3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.676199] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.676634] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1054.677365] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af91e1c2-62b2-4394-b1cd-5f8f2506b5e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.683546] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1054.683546] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f84de8-d717-8a2b-adae-3b8847a897e8" [ 1054.683546] env[68569]: _type = "Task" [ 1054.683546] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.692401] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f84de8-d717-8a2b-adae-3b8847a897e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.921602] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.921972] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Instance network_info: |[{"id": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "address": "fa:16:3e:69:64:5d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9d20c97-bf", "ovs_interfaceid": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1054.922345] env[68569]: DEBUG oslo_concurrency.lockutils [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] Acquired lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.922726] env[68569]: DEBUG nova.network.neutron [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Refreshing network info cache for port e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1054.925395] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:64:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9d20c97-bf70-4f30-8424-5db6b70e5677', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1054.933729] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.939880] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1054.939880] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2539160c-f156-4fcf-b1bd-8037927a4f0e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.961887] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1054.961887] env[68569]: value = "task-3167600" [ 1054.961887] env[68569]: _type = "Task" [ 1054.961887] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.970281] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167600, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.018493] env[68569]: DEBUG nova.compute.manager [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.018746] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.020119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.020511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.021129] env[68569]: DEBUG nova.objects.instance [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lazy-loading 'resources' on Instance uuid 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.023113] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5e4771-06d0-4d98-99bb-5f3cd7ceb437 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.035960] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.036393] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f87c438-89db-4b6e-acbb-1ec855f8255e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.044520] env[68569]: DEBUG oslo_vmware.api [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1055.044520] env[68569]: value = "task-3167601" [ 1055.044520] env[68569]: _type = "Task" [ 1055.044520] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.057725] env[68569]: DEBUG oslo_vmware.api [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167601, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.090852] env[68569]: DEBUG oslo_vmware.api [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167599, 'name': PowerOffVM_Task, 'duration_secs': 0.317787} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.091133] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.091295] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.091599] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-464aed8f-fbe2-4cf8-9c2d-261a965c8501 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.164336] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.164586] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.164793] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleting the datastore file [datastore2] 16112ff1-bda8-4a20-b69c-b847ade376b4 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.165390] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ebc9461-3ffe-4fd3-9072-eb19044ecfff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.171554] env[68569]: DEBUG oslo_vmware.api [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for the task: (returnval){ [ 1055.171554] env[68569]: value = "task-3167603" [ 1055.171554] env[68569]: _type = "Task" [ 1055.171554] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.180756] env[68569]: DEBUG oslo_vmware.api [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167603, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.202426] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f84de8-d717-8a2b-adae-3b8847a897e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011741} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.203690] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5a7d508-c452-45ff-91ed-33f76cea2f3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.211956] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1055.211956] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a069ae-1159-27fb-6046-15a97efa6ac9" [ 1055.211956] env[68569]: _type = "Task" [ 1055.211956] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.220340] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a069ae-1159-27fb-6046-15a97efa6ac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.228931] env[68569]: INFO nova.network.neutron [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Port ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
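The oslo_concurrency.lockutils lines above ("Acquiring lock ... by ...", "acquired ... :: waited Ns", '"released" ... :: held Ns') come from named in-process locks taken around per-instance teardown, the "<uuid>-events" queue, and the refresh_cache / compute_resources critical sections. Below is a minimal sketch of that pattern, assuming only the documented lockutils.synchronized decorator and lock() context manager; the lock names and stub functions are illustrative and are not Nova's actual code paths.

    # Sketch of the named-lock pattern behind the lockutils DEBUG lines above.
    # The lock names and stub bodies are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_stub(instance_uuid):
        # Runs only while the named in-process lock is held; the decorator's
        # wrapper emits the "acquired ... waited" / "released ... held" lines.
        print(f"updating resource usage for {instance_uuid}")

    def terminate_instance_stub(instance_uuid):
        # Nested named locks, loosely mirroring the "<uuid>" and "<uuid>-events"
        # locks seen in the entries above.
        with lockutils.lock(instance_uuid):
            with lockutils.lock(f"{instance_uuid}-events"):
                pass  # e.g. clear pending external events for the instance
            update_usage_stub(instance_uuid)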
[ 1055.229446] env[68569]: DEBUG nova.network.neutron [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.441973] env[68569]: DEBUG nova.network.neutron [-] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.475882] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167600, 'name': CreateVM_Task, 'duration_secs': 0.417439} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.478643] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1055.479379] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.479579] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.479913] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1055.480522] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d74d69a-5060-4676-8cfb-e2f86e538d43 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.485638] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1055.485638] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52252367-b81d-d434-ab17-66e765f8ac52" [ 1055.485638] env[68569]: _type = "Task" [ 1055.485638] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.493981] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52252367-b81d-d434-ab17-66e765f8ac52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.554601] env[68569]: DEBUG oslo_vmware.api [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167601, 'name': PowerOffVM_Task, 'duration_secs': 0.269258} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.554899] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.555099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.555375] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1870b3d6-726c-473c-ac4b-b7498a905f6f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.616106] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.616367] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.616550] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleting the datastore file [datastore1] 6824efd5-427b-420d-83d5-a1d5acd94bf9 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.617150] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4a218c2-a1f3-4f1c-a8c9-3c3bced99c84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.624703] env[68569]: DEBUG oslo_vmware.api [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1055.624703] env[68569]: value = "task-3167605" [ 1055.624703] env[68569]: _type = "Task" [ 1055.624703] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.640615] env[68569]: DEBUG oslo_vmware.api [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.681781] env[68569]: DEBUG oslo_vmware.api [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Task: {'id': task-3167603, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260241} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.682060] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1055.682244] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1055.682417] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1055.682634] env[68569]: INFO nova.compute.manager [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1055.682884] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1055.683103] env[68569]: DEBUG nova.compute.manager [-] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1055.683201] env[68569]: DEBUG nova.network.neutron [-] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1055.713991] env[68569]: DEBUG nova.network.neutron [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Updated VIF entry in instance network info cache for port e9d20c97-bf70-4f30-8424-5db6b70e5677. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1055.714719] env[68569]: DEBUG nova.network.neutron [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Updating instance_info_cache with network_info: [{"id": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "address": "fa:16:3e:69:64:5d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9d20c97-bf", "ovs_interfaceid": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.725425] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a069ae-1159-27fb-6046-15a97efa6ac9, 'name': SearchDatastore_Task, 'duration_secs': 0.022556} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.728706] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.728972] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3/6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1055.729792] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-830f3cb0-ce70-41da-bb07-4ebcef17d4e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.732449] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-6824efd5-427b-420d-83d5-a1d5acd94bf9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.739029] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1055.739029] env[68569]: value = "task-3167606" [ 1055.739029] env[68569]: _type = "Task" [ 1055.739029] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.750862] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167606, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.800739] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba235873-a438-474a-bf91-9d89e0fcdb7b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.810194] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a0d670-eb3b-4758-b362-99548059577e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.842050] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9cb2ea-2482-4e81-ab91-9f3130f81b29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.849733] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850c6130-948b-45c5-9fe5-351119640b23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.864544] env[68569]: DEBUG nova.compute.provider_tree [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1055.898700] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.898830] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquired lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.898955] env[68569]: DEBUG nova.network.neutron [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1055.944692] env[68569]: INFO nova.compute.manager [-] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Took 2.26 seconds to deallocate network for instance. 
[ 1055.998670] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52252367-b81d-d434-ab17-66e765f8ac52, 'name': SearchDatastore_Task, 'duration_secs': 0.010908} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.998997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.999410] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1055.999666] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.999814] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.999994] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1056.000700] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73734a7e-881d-42c1-94c5-75088cb7906b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.017386] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1056.017594] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1056.018386] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a637146a-fd93-4772-a460-72281da91c8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.025486] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1056.025486] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522af649-934b-ebe4-b29a-93734d2baec9" [ 1056.025486] env[68569]: _type = "Task" [ 1056.025486] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.033947] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522af649-934b-ebe4-b29a-93734d2baec9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.139016] env[68569]: DEBUG oslo_vmware.api [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294539} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.139354] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.139603] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.140171] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.140171] env[68569]: INFO nova.compute.manager [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1056.140414] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.140690] env[68569]: DEBUG nova.compute.manager [-] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.140817] env[68569]: DEBUG nova.network.neutron [-] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.220344] env[68569]: DEBUG oslo_concurrency.lockutils [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] Releasing lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.220685] env[68569]: DEBUG nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-vif-deleted-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1056.220885] env[68569]: INFO nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Neutron deleted interface ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11; detaching it from the instance and deleting it from the info cache [ 1056.221171] env[68569]: DEBUG nova.network.neutron [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [{"id": "324818fd-bb45-4575-9e4a-bb6516576dde", "address": "fa:16:3e:2b:19:03", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap324818fd-bb", "ovs_interfaceid": "324818fd-bb45-4575-9e4a-bb6516576dde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.236637] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8253dc6a-dd49-4aa7-9b0a-b687fbe037fd tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6824efd5-427b-420d-83d5-a1d5acd94bf9-ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" 
:: held 9.744s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.241226] env[68569]: DEBUG nova.compute.manager [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-vif-deleted-5d01ae9c-544e-4483-8eaf-ac68ad7945ec {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1056.241456] env[68569]: DEBUG nova.compute.manager [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-vif-deleted-2dc177dd-5fc5-42f5-b1a0-b6c86f70d82f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1056.241639] env[68569]: DEBUG nova.compute.manager [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Received event network-vif-deleted-ea020883-6378-448b-acd5-bd2f12b42b10 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1056.241837] env[68569]: DEBUG nova.compute.manager [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Received event network-vif-deleted-5eb797c4-637c-475d-b402-04df004a7182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1056.241985] env[68569]: INFO nova.compute.manager [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Neutron deleted interface 5eb797c4-637c-475d-b402-04df004a7182; detaching it from the instance and deleting it from the info cache [ 1056.242160] env[68569]: DEBUG nova.network.neutron [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.257820] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167606, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.393392] env[68569]: ERROR nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [req-df168bad-04e3-4871-9b12-ea227561b65f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-df168bad-04e3-4871-9b12-ea227561b65f"}]} [ 1056.410201] env[68569]: DEBUG nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1056.429031] env[68569]: DEBUG nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1056.429223] env[68569]: DEBUG nova.compute.provider_tree [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1056.441355] env[68569]: DEBUG nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1056.451478] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.460500] env[68569]: DEBUG nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1056.537882] env[68569]: DEBUG oslo_vmware.api [None 
req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522af649-934b-ebe4-b29a-93734d2baec9, 'name': SearchDatastore_Task, 'duration_secs': 0.060166} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.538595] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ee75d9d-9515-4b59-9ee9-0cc70c6f712c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.546588] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1056.546588] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52da7fea-bb85-02a4-bb4d-97780ed9b8be" [ 1056.546588] env[68569]: _type = "Task" [ 1056.546588] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.560379] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52da7fea-bb85-02a4-bb4d-97780ed9b8be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.585633] env[68569]: DEBUG nova.network.neutron [-] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.723835] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fec2c04-c14f-4e77-a2c0-a7e4673fbbc4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.735461] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83bb411-3228-4248-ba95-b93a5c4dbe62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.751613] env[68569]: DEBUG nova.network.neutron [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [{"id": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "address": "fa:16:3e:c3:61:8b", "network": {"id": "63b6f3d4-66a9-477c-b7a6-566cf0d9ab2d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-638108957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b924e47d91a488e9d912a5369792aa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13b7716-5b", "ovs_interfaceid": "d13b7716-5b05-4896-9da9-e3674d55a3a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.755037] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1dc92118-e2a2-4ced-92ff-c8d422afc3fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.766530] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167606, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631517} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.767703] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3/6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1056.768026] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.769096] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0513701-c2cb-41ea-ba9c-b2f6b1aab0bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.785099] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5382710e-0543-4aea-b0ba-1006726e950d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.787860] env[68569]: DEBUG nova.compute.manager [req-24311624-0ddc-4c13-82d0-d4b63974c100 req-c95bbfa8-1e32-4f97-b648-4c4a02511790 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Detach interface failed, port_id=ad2700ea-e58c-4c0e-9eeb-04c2b5ea0b11, reason: Instance 6824efd5-427b-420d-83d5-a1d5acd94bf9 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1056.792431] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bfa25e-5700-4bd2-8c9f-a56ca4c0ac34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.808361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.808682] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.811024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a38bf8-271f-45d2-adf5-2c97bf48f4a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.816632] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1056.816632] env[68569]: value = "task-3167607" [ 1056.816632] env[68569]: _type = "Task" [ 1056.816632] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.861629] env[68569]: DEBUG nova.compute.manager [req-8b62b129-600b-4273-a90b-d172c13d1007 req-ec48c51a-4290-43ba-8e86-3849810086a7 service nova] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Detach interface failed, port_id=5eb797c4-637c-475d-b402-04df004a7182, reason: Instance 16112ff1-bda8-4a20-b69c-b847ade376b4 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1056.864568] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980578ed-f7f4-44c6-b1d1-0914c6fe3e68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.871487] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167607, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.877179] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e702d90f-a232-42f7-97a9-ce0b8aa23f23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.892062] env[68569]: DEBUG nova.compute.provider_tree [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.061252] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52da7fea-bb85-02a4-bb4d-97780ed9b8be, 'name': SearchDatastore_Task, 'duration_secs': 0.026068} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.061543] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.061930] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] d7a0631c-902d-4653-b900-2123de5bcb44/d7a0631c-902d-4653-b900-2123de5bcb44.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1057.062251] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eade4846-570f-4d5f-8f8e-2cb5e0d8563f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.068563] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1057.068563] env[68569]: value = "task-3167608" [ 1057.068563] env[68569]: _type = "Task" [ 1057.068563] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.077537] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167608, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.087759] env[68569]: INFO nova.compute.manager [-] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Took 1.40 seconds to deallocate network for instance. 
[ 1057.255971] env[68569]: DEBUG oslo_concurrency.lockutils [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Releasing lock "refresh_cache-c0ea0ef8-93c2-416a-8caa-a51f7a39627e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.257354] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ee2e29-4207-4d01-80d3-58cb221529a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.265935] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Resuming the VM {{(pid=68569) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1057.265935] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e06bfb5-479d-4cad-8252-39bb76338de3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.274325] env[68569]: DEBUG oslo_vmware.api [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1057.274325] env[68569]: value = "task-3167609" [ 1057.274325] env[68569]: _type = "Task" [ 1057.274325] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.282310] env[68569]: DEBUG oslo_vmware.api [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.317047] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1057.329494] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167607, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109262} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.329702] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.330516] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a844689-4e98-425a-a8f2-3e621e11730f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.353306] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3/6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.353845] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbc69fec-db47-4837-ac51-09bdfbd18156 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.371636] env[68569]: DEBUG nova.network.neutron [-] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.382019] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1057.382019] env[68569]: value = "task-3167610" [ 1057.382019] env[68569]: _type = "Task" [ 1057.382019] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.391573] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167610, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.395048] env[68569]: DEBUG nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.580113] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167608, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.594567] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.785055] env[68569]: DEBUG oslo_vmware.api [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167609, 'name': PowerOnVM_Task} progress is 93%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.845479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.877457] env[68569]: INFO nova.compute.manager [-] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Took 1.74 seconds to deallocate network for instance. [ 1057.898462] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167610, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.900516] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.880s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.902853] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.452s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.903112] env[68569]: DEBUG nova.objects.instance [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lazy-loading 'resources' on Instance uuid 4c122cff-f64c-4e4f-9454-034c44ff246b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.925592] env[68569]: INFO nova.scheduler.client.report [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleted allocations for instance 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc [ 1058.080846] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167608, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585781} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.081149] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] d7a0631c-902d-4653-b900-2123de5bcb44/d7a0631c-902d-4653-b900-2123de5bcb44.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1058.081365] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1058.081737] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c99d37a-3fb9-4d85-8946-8fa19139e236 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.089407] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1058.089407] env[68569]: value = "task-3167611" [ 1058.089407] env[68569]: _type = "Task" [ 1058.089407] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.099224] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167611, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.123626] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfde3a-5659-c1c5-aae1-d7bad0e4405f/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1058.124581] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b99225-b3e1-4027-8cd9-d0f7c346f30f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.130624] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfde3a-5659-c1c5-aae1-d7bad0e4405f/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1058.130790] env[68569]: ERROR oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfde3a-5659-c1c5-aae1-d7bad0e4405f/disk-0.vmdk due to incomplete transfer. [ 1058.131038] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7acf5c1e-698f-4616-8541-caf7a8cb4149 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.137624] env[68569]: DEBUG oslo_vmware.rw_handles [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfde3a-5659-c1c5-aae1-d7bad0e4405f/disk-0.vmdk. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1058.137815] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Uploaded image 1146c72a-4d3b-4831-9e73-15f957a6f7be to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1058.140220] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1058.140506] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ffc2eab1-4100-44cb-8660-8afdf4c73a99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.145952] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1058.145952] env[68569]: value = "task-3167612" [ 1058.145952] env[68569]: _type = "Task" [ 1058.145952] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.153925] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167612, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.268981] env[68569]: DEBUG nova.compute.manager [req-fc2c5cf0-0d75-4a9f-8488-ea7f41f2534b req-fa975228-e0ab-43da-9a39-e0e353c23824 service nova] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Received event network-vif-deleted-324818fd-bb45-4575-9e4a-bb6516576dde {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1058.283765] env[68569]: DEBUG oslo_vmware.api [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167609, 'name': PowerOnVM_Task, 'duration_secs': 0.652764} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.284036] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Resumed the VM {{(pid=68569) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1058.284222] env[68569]: DEBUG nova.compute.manager [None req-23031cd3-2ac9-485f-9d48-3188302efc37 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1058.284960] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00f2eb1-d047-40ad-bfb3-be54a817727d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.387686] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.392081] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167610, 'name': ReconfigVM_Task, 'duration_secs': 0.572504} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.392365] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3/6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.392970] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2734314-a491-4b61-9bb3-1c16c6933de2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.399245] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1058.399245] env[68569]: value = "task-3167613" [ 1058.399245] env[68569]: _type = "Task" [ 1058.399245] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.410741] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167613, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.435479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9bd2ff24-e249-4592-a97b-912eca4a308a tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.617s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.599321] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167611, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072905} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.601898] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1058.602882] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677b31e5-dd93-4aa3-bb05-7b171dd7a906 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.626257] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] d7a0631c-902d-4653-b900-2123de5bcb44/d7a0631c-902d-4653-b900-2123de5bcb44.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1058.629080] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f18674c-7a08-46e1-b92e-3737d539aadd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.651110] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1058.651110] env[68569]: value = "task-3167614" [ 1058.651110] env[68569]: _type = "Task" [ 1058.651110] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.657818] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167612, 'name': Destroy_Task, 'duration_secs': 0.330088} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.658451] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf12f7a-4976-40b2-bbef-42351c3506cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.660980] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Destroyed the VM [ 1058.661272] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1058.661508] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-997ab9f8-550a-4cd5-b57a-e8c5ae61d3e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.666390] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.669303] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbdc004-64cb-4381-b290-cd71d60c6b7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.674518] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1058.674518] env[68569]: value = "task-3167615" [ 1058.674518] env[68569]: _type = "Task" [ 1058.674518] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.705196] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52076cec-0c92-474f-b889-eda34563874f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.710977] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167615, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.715707] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7af1156-53ea-4237-90b5-139d5fa7a572 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.730998] env[68569]: DEBUG nova.compute.provider_tree [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.911919] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167613, 'name': Rename_Task, 'duration_secs': 0.147645} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.911919] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1058.911919] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79dfc4e0-ee31-4e9e-89f9-0b41a4fef003 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.919792] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1058.919792] env[68569]: value = "task-3167616" [ 1058.919792] env[68569]: _type = "Task" [ 1058.919792] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.927665] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167616, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.963647] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.963647] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.963647] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.963745] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.963879] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.966337] env[68569]: INFO nova.compute.manager [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Terminating instance [ 1059.161470] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167614, 'name': ReconfigVM_Task, 'duration_secs': 0.28654} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.161829] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Reconfigured VM instance instance-00000062 to attach disk [datastore1] d7a0631c-902d-4653-b900-2123de5bcb44/d7a0631c-902d-4653-b900-2123de5bcb44.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1059.162410] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-189080bb-bc51-4ed7-a0b1-beaf4bea6c09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.168429] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1059.168429] env[68569]: value = "task-3167617" [ 1059.168429] env[68569]: _type = "Task" [ 1059.168429] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.176028] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167617, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.183287] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167615, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.253802] env[68569]: ERROR nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [req-09030593-e95f-446b-9e47-a00d50855207] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-09030593-e95f-446b-9e47-a00d50855207"}]} [ 1059.274350] env[68569]: DEBUG nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1059.288065] env[68569]: DEBUG nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1059.288203] env[68569]: DEBUG nova.compute.provider_tree [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.300863] env[68569]: DEBUG nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1059.320638] env[68569]: DEBUG nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1059.432146] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167616, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.471387] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "refresh_cache-7b10cfb4-dc0a-4311-a24f-7a25869ef594" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.471387] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquired lock "refresh_cache-7b10cfb4-dc0a-4311-a24f-7a25869ef594" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.471646] env[68569]: DEBUG nova.network.neutron [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.572615] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.576188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.600932] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bde230a-9d49-4705-8298-979e8e44efda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.610279] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914433f1-301b-4002-8222-bf35f805dded {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.643804] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb143e9-e8de-459d-a925-40e6e9653635 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.651744] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ec6b20-d60b-4904-b8be-f65cf6334f54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.667084] env[68569]: DEBUG nova.compute.provider_tree [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.676835] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167617, 'name': Rename_Task, 'duration_secs': 0.175763} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.679473] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1059.679696] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-421f44ae-574f-4964-99d7-4fcb9bcdbec7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.686783] env[68569]: DEBUG oslo_vmware.api [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167615, 'name': RemoveSnapshot_Task, 'duration_secs': 0.784912} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.691034] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1059.691034] env[68569]: INFO nova.compute.manager [None req-69ddbae4-2dd1-401e-a4e0-7ca682cde75f tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Took 19.24 seconds to snapshot the instance on the hypervisor. [ 1059.691034] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1059.691034] env[68569]: value = "task-3167618" [ 1059.691034] env[68569]: _type = "Task" [ 1059.691034] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.698253] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167618, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.929702] env[68569]: DEBUG oslo_vmware.api [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167616, 'name': PowerOnVM_Task, 'duration_secs': 0.521054} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.929971] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1059.930221] env[68569]: INFO nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Took 9.62 seconds to spawn the instance on the hypervisor. [ 1059.930419] env[68569]: DEBUG nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.932137] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7fe9c7-d0ad-40f1-b52f-d030481f53f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.996638] env[68569]: DEBUG nova.network.neutron [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1060.042678] env[68569]: DEBUG nova.network.neutron [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.085924] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1060.204963] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167618, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.205920] env[68569]: DEBUG nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1060.206208] env[68569]: DEBUG nova.compute.provider_tree [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 145 to 146 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1060.206364] env[68569]: DEBUG nova.compute.provider_tree [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1060.451454] env[68569]: INFO nova.compute.manager [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Took 21.69 seconds to build instance. [ 1060.545252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Releasing lock "refresh_cache-7b10cfb4-dc0a-4311-a24f-7a25869ef594" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.545742] env[68569]: DEBUG nova.compute.manager [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1060.546138] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.547162] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20eec39b-aaca-4393-9fa1-d649b2ca4d66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.556169] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.556169] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3566fcbb-3809-4f3c-a1c6-5488f551eb45 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.562739] env[68569]: DEBUG oslo_vmware.api [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1060.562739] env[68569]: value = "task-3167619" [ 1060.562739] env[68569]: _type = "Task" [ 1060.562739] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.571319] env[68569]: DEBUG oslo_vmware.api [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167619, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.613027] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.703245] env[68569]: DEBUG oslo_vmware.api [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167618, 'name': PowerOnVM_Task, 'duration_secs': 1.004375} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.703527] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1060.703811] env[68569]: INFO nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Took 8.42 seconds to spawn the instance on the hypervisor. [ 1060.703894] env[68569]: DEBUG nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1060.704703] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7dbce8-9a26-4ca9-a667-81fa270b8245 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.711237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.808s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.715893] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.122s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.716069] env[68569]: DEBUG nova.objects.instance [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lazy-loading 'resources' on Instance uuid 16112ff1-bda8-4a20-b69c-b847ade376b4 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.733859] env[68569]: INFO nova.scheduler.client.report [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Deleted allocations for instance 4c122cff-f64c-4e4f-9454-034c44ff246b [ 1060.953249] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc7331c8-1e9b-4f7e-b23e-4512967d7a67 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.199s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.072855] env[68569]: DEBUG oslo_vmware.api [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167619, 
'name': PowerOffVM_Task, 'duration_secs': 0.140379} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.073139] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1061.073306] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1061.073563] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d9fcc24-0cc2-46a9-8ffe-91f2ae696f9f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.097794] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1061.098067] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1061.098225] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleting the datastore file [datastore1] 7b10cfb4-dc0a-4311-a24f-7a25869ef594 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1061.098505] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-861aab3a-1f09-480a-9fc1-9ac581767b07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.105087] env[68569]: DEBUG oslo_vmware.api [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for the task: (returnval){ [ 1061.105087] env[68569]: value = "task-3167621" [ 1061.105087] env[68569]: _type = "Task" [ 1061.105087] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.112884] env[68569]: DEBUG oslo_vmware.api [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167621, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.232096] env[68569]: INFO nova.compute.manager [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Took 14.39 seconds to build instance. [ 1061.245529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-062979cd-0302-4605-9f63-9d4b9892235b tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "4c122cff-f64c-4e4f-9454-034c44ff246b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.290s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.347945] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "7c71799e-21d9-41f8-b35c-7117354a0287" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.348122] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7c71799e-21d9-41f8-b35c-7117354a0287" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.431687] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326da851-97aa-4996-aab7-e7137c718b14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.440680] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f12a157-cd69-49bc-98c0-9a120fd224bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.470014] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f205403-bac0-4c75-a2e0-eca65baa5680 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.477403] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13118a03-11c8-44ad-a806-e39ed073bd3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.491293] env[68569]: DEBUG nova.compute.provider_tree [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.614787] env[68569]: DEBUG oslo_vmware.api [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Task: {'id': task-3167621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131332} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.615261] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.615261] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1061.615408] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.615576] env[68569]: INFO nova.compute.manager [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1061.615812] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.616022] env[68569]: DEBUG nova.compute.manager [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1061.616121] env[68569]: DEBUG nova.network.neutron [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1061.633494] env[68569]: DEBUG nova.network.neutron [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.736136] env[68569]: DEBUG oslo_concurrency.lockutils [None req-42956307-405f-4e73-a4fd-8c743ab2ca34 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "d7a0631c-902d-4653-b900-2123de5bcb44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.897s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.853357] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1061.998436] env[68569]: DEBUG nova.scheduler.client.report [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1062.136024] env[68569]: DEBUG nova.network.neutron [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.381697] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.410804] env[68569]: DEBUG nova.compute.manager [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1062.503351] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.508128] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.662s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.510486] env[68569]: INFO nova.compute.claims [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1062.527054] env[68569]: INFO nova.scheduler.client.report [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Deleted allocations for instance 16112ff1-bda8-4a20-b69c-b847ade376b4 [ 1062.638313] env[68569]: INFO nova.compute.manager [-] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Took 1.02 seconds to deallocate network for instance. 
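The failed inventory update at [ 1059.253802 ] (HTTP 409, code placement.concurrent_update) and the successful retry at generation 145 -> 146 around [ 1060.205920 ] follow Placement's optimistic-concurrency scheme: every PUT to /resource_providers/{uuid}/inventories must carry the provider generation that was last read, and a mismatch forces the client to re-read and try again. A minimal sketch of that read/PUT/retry loop follows, assuming a plain requests session and a hypothetical endpoint; it illustrates the protocol, not Nova's actual scheduler report client.

    import requests

    PLACEMENT = "http://placement.example.test/placement"   # hypothetical endpoint
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}    # auth token omitted for brevity

    def put_inventory_with_retry(session, rp_uuid, inventories, max_attempts=3):
        """PUT inventories, retrying when the provider generation has moved on."""
        for _ in range(max_attempts):
            # Read the provider to learn its current generation (the concurrency token).
            rp = session.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                             headers=HEADERS).json()
            payload = {"resource_provider_generation": rp["generation"],
                       "inventories": inventories}
            resp = session.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                               headers=HEADERS, json=payload)
            if resp.status_code == 200:
                return resp.json()   # success; the body carries the new generation
            if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
                continue             # another writer bumped the generation; re-read and retry
            resp.raise_for_status()
        raise RuntimeError(f"inventory update for {rp_uuid} kept hitting generation conflicts")

This mirrors the trace: after the 409 the report client refreshes inventories, aggregates, and traits for the provider, then resubmits with the fresh generation, which Placement accepts and bumps from 145 to 146.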
[ 1062.928985] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.033946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-10049c89-2798-4296-a21c-c88721744636 tempest-ServerDiskConfigTestJSON-1172398095 tempest-ServerDiskConfigTestJSON-1172398095-project-member] Lock "16112ff1-bda8-4a20-b69c-b847ade376b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.982s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.145165] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.301071] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.302568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.302568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.302568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.302568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.304148] env[68569]: INFO nova.compute.manager [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Terminating instance [ 1063.782054] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203ed90a-f852-4675-926d-1c2fd66dbf83 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.791237] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02031bb-6b63-46a9-96ea-4b48e7fe2eff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.826172] env[68569]: DEBUG nova.compute.manager [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1063.826673] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.829287] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc112b3-212f-4a33-ab73-d8691bec8746 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.832619] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77b34cb-1020-4c25-8eee-ef37bb345b35 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.838775] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.839189] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.849604] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43da47a8-fec3-4b69-b4bb-cbaa2dbf4bda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.856664] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 
tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.857436] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fffe8b88-4dce-422e-934e-8bc918485b95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.870036] env[68569]: DEBUG nova.compute.provider_tree [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.875188] env[68569]: DEBUG oslo_vmware.api [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1063.875188] env[68569]: value = "task-3167622" [ 1063.875188] env[68569]: _type = "Task" [ 1063.875188] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.883529] env[68569]: DEBUG oslo_vmware.api [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167622, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.341706] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1064.374057] env[68569]: DEBUG nova.scheduler.client.report [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.386944] env[68569]: DEBUG oslo_vmware.api [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167622, 'name': PowerOffVM_Task, 'duration_secs': 0.226573} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.386944] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.387119] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.387852] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f17965e-e3d2-4ff4-be8d-fc6057e45d08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.449662] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.451027] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.451216] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleting the datastore file [datastore2] c0ea0ef8-93c2-416a-8caa-a51f7a39627e {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.451515] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31fcd04b-7ade-4205-b700-b8381415aeb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.460492] env[68569]: DEBUG oslo_vmware.api [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for the task: (returnval){ [ 1064.460492] env[68569]: value = "task-3167624" [ 1064.460492] env[68569]: _type = "Task" [ 1064.460492] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.469343] env[68569]: DEBUG oslo_vmware.api [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167624, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.862716] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.881746] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.882268] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1064.885880] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.498s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.886109] env[68569]: DEBUG nova.objects.instance [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'resources' on Instance uuid 6824efd5-427b-420d-83d5-a1d5acd94bf9 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.890183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.890441] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.969882] env[68569]: DEBUG oslo_vmware.api [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Task: {'id': task-3167624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288434} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.970196] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.970384] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.970553] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.970719] env[68569]: INFO nova.compute.manager [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1064.970947] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1064.971141] env[68569]: DEBUG nova.compute.manager [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1064.971234] env[68569]: DEBUG nova.network.neutron [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.318348] env[68569]: DEBUG nova.compute.manager [req-90b3a994-9ded-4b2d-998e-161637860acf req-c5db9643-f0a5-4e3c-9b46-5cb59d03ac87 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Received event network-vif-deleted-d13b7716-5b05-4896-9da9-e3674d55a3a0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1065.318348] env[68569]: INFO nova.compute.manager [req-90b3a994-9ded-4b2d-998e-161637860acf req-c5db9643-f0a5-4e3c-9b46-5cb59d03ac87 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Neutron deleted interface d13b7716-5b05-4896-9da9-e3674d55a3a0; detaching it from the instance and deleting it from the info cache [ 1065.318348] env[68569]: DEBUG nova.network.neutron [req-90b3a994-9ded-4b2d-998e-161637860acf req-c5db9643-f0a5-4e3c-9b46-5cb59d03ac87 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.389376] env[68569]: DEBUG nova.compute.utils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1065.390721] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1065.390882] env[68569]: DEBUG nova.network.neutron [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1065.395651] env[68569]: DEBUG nova.compute.utils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1065.430969] env[68569]: DEBUG nova.policy [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '842e74e7139540d7a537eb8bd56bca78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e52d937c83d46daa36746494bd7ccbe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1065.593099] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a5f1c6-2f5e-4336-9121-8576457e24cc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.600744] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea00cf6-b06f-48d6-bd79-3e54d3618314 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.632123] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88bbd47-64a2-48d7-afd8-8e7845c9aa63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.642357] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15f5400-7cd5-40c5-b147-1536f6b74d1b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.657610] env[68569]: DEBUG nova.compute.provider_tree [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.735481] env[68569]: DEBUG nova.network.neutron [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Successfully created port: 9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1065.737580] env[68569]: DEBUG nova.network.neutron [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Updating instance_info_cache with network_info: [] {{(pid=68569) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.819972] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8b62ad8-5463-4320-98b9-500a78b8fbc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.829392] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0db29e-cab5-4ab6-9759-f28d8956ed98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.859534] env[68569]: DEBUG nova.compute.manager [req-90b3a994-9ded-4b2d-998e-161637860acf req-c5db9643-f0a5-4e3c-9b46-5cb59d03ac87 service nova] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Detach interface failed, port_id=d13b7716-5b05-4896-9da9-e3674d55a3a0, reason: Instance c0ea0ef8-93c2-416a-8caa-a51f7a39627e could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1065.893851] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1065.898183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.160615] env[68569]: DEBUG nova.scheduler.client.report [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1066.242802] env[68569]: INFO nova.compute.manager [-] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Took 1.27 seconds to deallocate network for instance. 
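The PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task sequence above is driven by a poll loop: the compute node logs "Waiting for the task", repeats "progress is N%" while vCenter works, and finishes with "completed successfully" plus the measured duration_secs. A rough sketch of that polling pattern, assuming a hypothetical `get_task_info` callable that returns the task's state and progress; this is not the oslo.vmware wait_for_task implementation itself:

```python
# Illustrative poll loop mirroring the task-wait pattern in the log
# ("progress is 0%." ... "completed successfully"). Not oslo.vmware code;
# 'get_task_info' stands in for the vSphere TaskInfo lookup.
import time

def wait_for_vc_task(get_task_info, task_id, interval=0.5):
    while True:
        info = get_task_info(task_id)   # e.g. {'state': ..., 'progress': ...}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"{task_id} failed: {info.get('error')}")
        # Matches the periodic "Task: {...} progress is N%." DEBUG lines.
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
```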
[ 1066.665989] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.668854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.056s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.670578] env[68569]: INFO nova.compute.claims [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1066.688769] env[68569]: INFO nova.scheduler.client.report [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted allocations for instance 6824efd5-427b-420d-83d5-a1d5acd94bf9 [ 1066.751273] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.909533] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1066.936369] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1066.937113] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.937113] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1066.937113] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.937113] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1066.937294] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1066.937412] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1066.937565] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1066.937724] 
env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1066.937878] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1066.938064] env[68569]: DEBUG nova.virt.hardware [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1066.938944] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba47da9-1c4a-4495-96ec-51043bfdb0a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.947105] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8b485a-7297-4795-a1ef-5c47f369620a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.970730] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.971014] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.971249] env[68569]: INFO nova.compute.manager [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Attaching volume c8148d10-8bad-49bc-93bf-559f83378a11 to /dev/sdb [ 1067.004925] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aec36c-c3c5-4752-b44c-942e760041db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.012017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a18048c-7b35-4a16-a421-fcfbeb7e839c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.025080] env[68569]: DEBUG nova.virt.block_device [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 
a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating existing volume attachment record: 7e794756-1d75-4cc1-b15c-172ea7f7659a {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1067.198758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bdd22966-d1ed-4d2c-8730-af29e0b4dd49 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6824efd5-427b-420d-83d5-a1d5acd94bf9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.692s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.573225] env[68569]: DEBUG nova.compute.manager [req-b7cd4bd3-4852-43ec-a4d3-aa305bbcfa03 req-0515debf-74fd-4760-8bc3-2c976f6afc12 service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Received event network-vif-plugged-9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1067.573515] env[68569]: DEBUG oslo_concurrency.lockutils [req-b7cd4bd3-4852-43ec-a4d3-aa305bbcfa03 req-0515debf-74fd-4760-8bc3-2c976f6afc12 service nova] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.573786] env[68569]: DEBUG oslo_concurrency.lockutils [req-b7cd4bd3-4852-43ec-a4d3-aa305bbcfa03 req-0515debf-74fd-4760-8bc3-2c976f6afc12 service nova] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.574458] env[68569]: DEBUG oslo_concurrency.lockutils [req-b7cd4bd3-4852-43ec-a4d3-aa305bbcfa03 req-0515debf-74fd-4760-8bc3-2c976f6afc12 service nova] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.574731] env[68569]: DEBUG nova.compute.manager [req-b7cd4bd3-4852-43ec-a4d3-aa305bbcfa03 req-0515debf-74fd-4760-8bc3-2c976f6afc12 service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] No waiting events found dispatching network-vif-plugged-9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1067.575022] env[68569]: WARNING nova.compute.manager [req-b7cd4bd3-4852-43ec-a4d3-aa305bbcfa03 req-0515debf-74fd-4760-8bc3-2c976f6afc12 service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Received unexpected event network-vif-plugged-9808ed24-af29-4716-93e9-049a46b044c6 for instance with vm_state building and task_state spawning. 
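The WARNING just above ("Received unexpected event network-vif-plugged-9808ed24-... for instance with vm_state building and task_state spawning") happens because the Neutron notification arrived before the driver registered a waiter for it: the per-instance "-events" lock is taken, no matching waiter is found to pop, and the event is logged as unexpected instead of waking anything. A simplified sketch of that dispatch, with hypothetical class and function names rather than the exact ones in nova.compute.manager:

```python
# Sketch of the event dispatch behind the WARNING above: if nothing registered
# a waiter for 'network-vif-plugged-<port>', the event is reported as
# unexpected. Purely illustrative; names do not match Nova's code exactly.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}        # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        with self._lock:          # "Acquiring lock '<uuid>-events'" in the log
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

    def pop_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name,
                            vm_state, task_state):
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # Matches "No waiting events found dispatching ..." followed by the
        # "Received unexpected event ..." warning while building/spawning.
        print(f"WARNING Received unexpected event {event_name} for instance "
              f"with vm_state {vm_state} and task_state {task_state}")
    else:
        waiter.set()              # wakes the thread blocked on this event
```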
[ 1067.631245] env[68569]: DEBUG nova.network.neutron [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Successfully updated port: 9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1067.923954] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7c7446-feb4-4db1-bd01-c299b68be4c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.932244] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549403af-f1a8-4efd-ae2b-8ad09ba8e96d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.963184] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6162fe24-f785-4f39-8da2-773fc9f746c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.971215] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13cba48-a658-4c43-82be-ac96eb40052f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.984782] env[68569]: DEBUG nova.compute.provider_tree [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.137913] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.138217] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.138484] env[68569]: DEBUG nova.network.neutron [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.287714] env[68569]: INFO nova.compute.manager [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Rebuilding instance [ 1068.332035] env[68569]: DEBUG nova.compute.manager [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Checking state {{(pid=68569) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.334289] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fa669b-22b1-4947-9ff6-81b91aba36fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.489836] env[68569]: DEBUG nova.scheduler.client.report [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.686925] env[68569]: DEBUG nova.network.neutron [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1068.864233] env[68569]: DEBUG nova.network.neutron [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updating instance_info_cache with network_info: [{"id": "9808ed24-af29-4716-93e9-049a46b044c6", "address": "fa:16:3e:f1:31:7d", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9808ed24-af", "ovs_interfaceid": "9808ed24-af29-4716-93e9-049a46b044c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.993820] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.994162] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a 
tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1068.996759] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.616s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.998313] env[68569]: INFO nova.compute.claims [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1069.348298] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.348648] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1af0d520-8a13-4bd9-b738-4cd3540fe889 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.354823] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1069.354823] env[68569]: value = "task-3167629" [ 1069.354823] env[68569]: _type = "Task" [ 1069.354823] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.363399] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.367181] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.368103] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Instance network_info: |[{"id": "9808ed24-af29-4716-93e9-049a46b044c6", "address": "fa:16:3e:f1:31:7d", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9808ed24-af", "ovs_interfaceid": "9808ed24-af29-4716-93e9-049a46b044c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1069.368568] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:31:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9808ed24-af29-4716-93e9-049a46b044c6', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1069.376938] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1069.377220] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1069.377434] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3f192fa-5e04-4b67-a516-376cc05bc0dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.399268] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1069.399268] env[68569]: value = "task-3167630" [ 1069.399268] env[68569]: _type = "Task" [ 1069.399268] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.407018] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167630, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.485421] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.485655] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "ee188712-b0e0-44ee-80b4-be72da32299f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.502674] env[68569]: DEBUG nova.compute.utils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1069.507718] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1069.508434] env[68569]: DEBUG nova.network.neutron [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1069.551575] env[68569]: DEBUG nova.policy [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868bbe891585423f85374f6dffdc7813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62ff17f9dcc242e0aff061402e57bdcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1069.599040] env[68569]: DEBUG nova.compute.manager [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Received event network-changed-9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1069.599138] env[68569]: DEBUG nova.compute.manager [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Refreshing instance network info cache due to event network-changed-9808ed24-af29-4716-93e9-049a46b044c6. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1069.599409] env[68569]: DEBUG oslo_concurrency.lockutils [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] Acquiring lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.599495] env[68569]: DEBUG oslo_concurrency.lockutils [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] Acquired lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.599652] env[68569]: DEBUG nova.network.neutron [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Refreshing network info cache for port 9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.869613] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167629, 'name': PowerOffVM_Task, 'duration_secs': 0.190552} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.870261] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1069.870501] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.871268] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c680cc2b-26d2-4220-98e3-079731d3b04c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.878571] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.878799] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71b62d7c-6031-4595-a172-4685b5c43223 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.900020] env[68569]: DEBUG nova.network.neutron [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Successfully created port: bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1069.909903] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167630, 'name': CreateVM_Task, 'duration_secs': 0.314253} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.910073] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1069.910728] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.910892] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.911215] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1069.911488] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77fe440-4bb6-49cf-9b48-6da397772297 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.915801] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1069.915801] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ce2e2d-5835-2a74-eb67-367ae8da9ea0" [ 1069.915801] env[68569]: _type = "Task" [ 1069.915801] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.924649] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ce2e2d-5835-2a74-eb67-367ae8da9ea0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.939024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.939315] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.942363] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleting the datastore file [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.942363] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2c80461-2087-4341-9820-8e8b8190aba4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.946566] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1069.946566] env[68569]: value = "task-3167632" [ 1069.946566] env[68569]: _type = "Task" [ 1069.946566] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.954535] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.991826] env[68569]: DEBUG nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1070.008465] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1070.243651] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135e9dff-86aa-4dd2-bc16-946ed5a85858 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.251275] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b0358b-c435-4a21-9992-d1c42c8b8842 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.285359] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259d87bc-9347-44cf-bada-df9ca83bdde7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.294129] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d37e89-7a3e-4b02-a208-1a1fcab47e07 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.307513] env[68569]: DEBUG nova.compute.provider_tree [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.362871] env[68569]: DEBUG nova.network.neutron [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updated VIF entry in instance network info cache for port 9808ed24-af29-4716-93e9-049a46b044c6. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.363233] env[68569]: DEBUG nova.network.neutron [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updating instance_info_cache with network_info: [{"id": "9808ed24-af29-4716-93e9-049a46b044c6", "address": "fa:16:3e:f1:31:7d", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9808ed24-af", "ovs_interfaceid": "9808ed24-af29-4716-93e9-049a46b044c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.425577] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ce2e2d-5835-2a74-eb67-367ae8da9ea0, 'name': SearchDatastore_Task, 'duration_secs': 0.01886} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.425862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.426098] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1070.426336] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.426479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.426657] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1070.426975] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93a7a32b-d051-4bfa-bb20-2e9e274d994a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.435012] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1070.435195] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1070.435859] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-399df299-e584-4b30-b047-1a478aba0af3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.440551] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1070.440551] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239c1ee-334f-c608-f19a-7bf145f669ec" [ 1070.440551] env[68569]: _type = "Task" [ 1070.440551] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.448285] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239c1ee-334f-c608-f19a-7bf145f669ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.455125] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340717} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.455342] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.455517] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1070.455686] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1070.509226] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.811666] env[68569]: DEBUG nova.scheduler.client.report [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.865936] env[68569]: DEBUG oslo_concurrency.lockutils [req-e03d2d82-ec95-49f4-9f6c-3a75c01e3abf req-3de5bbcc-e1fe-48c8-83de-9f92251a9d2e service nova] Releasing lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.952542] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5239c1ee-334f-c608-f19a-7bf145f669ec, 'name': SearchDatastore_Task, 'duration_secs': 0.00963} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.953341] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-584b97d8-2999-4515-bc77-0d3f77395a12 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.958434] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1070.958434] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524f9205-969f-b569-2618-515819a08dde" [ 1070.958434] env[68569]: _type = "Task" [ 1070.958434] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.969533] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524f9205-969f-b569-2618-515819a08dde, 'name': SearchDatastore_Task, 'duration_secs': 0.008711} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.969758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.970024] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 24bcffcc-6da1-4ae5-b802-88e9364eaf0e/24bcffcc-6da1-4ae5-b802-88e9364eaf0e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.970280] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a81da23-d46b-4297-bf65-7775bb578cc9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.976208] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1070.976208] env[68569]: value = "task-3167633" [ 1070.976208] env[68569]: _type = "Task" [ 1070.976208] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.983913] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167633, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.021095] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1071.046950] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='81ce6db5764349e1728dcecfd0b46a59',container_format='bare',created_at=2025-03-26T04:58:09Z,direct_url=,disk_format='vmdk',id=1146c72a-4d3b-4831-9e73-15f957a6f7be,min_disk=1,min_ram=0,name='tempest-test-snap-1853772118',owner='62ff17f9dcc242e0aff061402e57bdcd',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-26T04:58:28Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1071.047235] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.047379] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1071.047534] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.047679] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1071.047826] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1071.048051] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1071.048223] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1071.048391] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Got 1 possible 
topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1071.048548] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1071.048714] env[68569]: DEBUG nova.virt.hardware [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1071.049606] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ee14fc-6a38-4a88-88d5-2fb397210011 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.057083] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f99327-0759-4f39-9d33-2658987a9d9f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.317707] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.318301] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1071.321361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 8.392s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.477548] env[68569]: DEBUG nova.network.neutron [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Successfully updated port: bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1071.487397] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435834} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.487665] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 24bcffcc-6da1-4ae5-b802-88e9364eaf0e/24bcffcc-6da1-4ae5-b802-88e9364eaf0e.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.487874] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1071.488143] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bd96ca6-85ff-4371-984d-0a35d15b909d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.496201] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1071.496201] env[68569]: value = "task-3167634" [ 1071.496201] env[68569]: _type = "Task" [ 1071.496201] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.498266] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1071.498492] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.498645] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1071.498825] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 
tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.498991] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1071.499230] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1071.499477] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1071.499717] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1071.499926] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1071.500164] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1071.500369] env[68569]: DEBUG nova.virt.hardware [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1071.501293] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724b252d-c3e4-4746-9970-60d0ccfcf834 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.515405] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7faf3e0-8501-4eb7-bc19-12b007f117c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.519347] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167634, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.529331] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:be:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82d84faa-d446-4ed1-b1c1-65399875d3f2', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1071.536678] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.536901] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1071.537121] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b6e4a0a-5fea-41de-81ac-8742fe248ede {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.555479] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1071.555479] env[68569]: value = "task-3167635" [ 1071.555479] env[68569]: _type = "Task" [ 1071.555479] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.563699] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167635, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.569887] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1071.570183] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1071.570915] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547f82e3-d134-4e54-a980-076e98204ff1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.587880] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2f1ece-3159-4994-838b-a438d72061ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.611643] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] volume-c8148d10-8bad-49bc-93bf-559f83378a11/volume-c8148d10-8bad-49bc-93bf-559f83378a11.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.611930] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-676f4ac7-68fc-463d-88d3-8fe9923434b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.628316] env[68569]: DEBUG nova.compute.manager [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Received event network-vif-plugged-bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1071.628554] env[68569]: DEBUG oslo_concurrency.lockutils [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] Acquiring lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.628754] env[68569]: DEBUG oslo_concurrency.lockutils [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.628937] env[68569]: DEBUG 
oslo_concurrency.lockutils [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.629163] env[68569]: DEBUG nova.compute.manager [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] No waiting events found dispatching network-vif-plugged-bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1071.629359] env[68569]: WARNING nova.compute.manager [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Received unexpected event network-vif-plugged-bb074230-39bf-4287-9375-e7e6b237a383 for instance with vm_state building and task_state spawning. [ 1071.629544] env[68569]: DEBUG nova.compute.manager [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Received event network-changed-bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1071.629705] env[68569]: DEBUG nova.compute.manager [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Refreshing instance network info cache due to event network-changed-bb074230-39bf-4287-9375-e7e6b237a383. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1071.629890] env[68569]: DEBUG oslo_concurrency.lockutils [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] Acquiring lock "refresh_cache-5eb7ff96-3a9e-470d-9cbe-644446302ecf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.630056] env[68569]: DEBUG oslo_concurrency.lockutils [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] Acquired lock "refresh_cache-5eb7ff96-3a9e-470d-9cbe-644446302ecf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.630216] env[68569]: DEBUG nova.network.neutron [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Refreshing network info cache for port bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1071.634895] env[68569]: DEBUG oslo_vmware.api [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1071.634895] env[68569]: value = "task-3167636" [ 1071.634895] env[68569]: _type = "Task" [ 1071.634895] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.641747] env[68569]: DEBUG oslo_vmware.api [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167636, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.825506] env[68569]: DEBUG nova.compute.utils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1071.827093] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1071.827307] env[68569]: DEBUG nova.network.neutron [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1071.833096] env[68569]: INFO nova.compute.claims [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.890204] env[68569]: DEBUG nova.policy [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1071.982939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "refresh_cache-5eb7ff96-3a9e-470d-9cbe-644446302ecf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.009689] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06421} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.009974] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1072.010778] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445f5d65-e1c1-49aa-815d-49d1c1840522 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.032940] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 24bcffcc-6da1-4ae5-b802-88e9364eaf0e/24bcffcc-6da1-4ae5-b802-88e9364eaf0e.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1072.033369] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b29c773a-5050-43cd-98e3-0a5df750f913 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.052550] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1072.052550] env[68569]: value = "task-3167637" [ 1072.052550] env[68569]: _type = "Task" [ 1072.052550] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.061455] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167637, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.065970] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167635, 'name': CreateVM_Task, 'duration_secs': 0.32348} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.066605] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1072.066803] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.067013] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.067342] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1072.067640] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dda0e98c-fcbb-4030-a1d8-e064d2004d97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.071551] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1072.071551] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52425030-01f7-b65e-26b8-dd2c0c0e9ec6" [ 1072.071551] env[68569]: _type = "Task" [ 1072.071551] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.081414] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52425030-01f7-b65e-26b8-dd2c0c0e9ec6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.145419] env[68569]: DEBUG oslo_vmware.api [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167636, 'name': ReconfigVM_Task, 'duration_secs': 0.417689} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.145755] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfigured VM instance instance-0000005a to attach disk [datastore1] volume-c8148d10-8bad-49bc-93bf-559f83378a11/volume-c8148d10-8bad-49bc-93bf-559f83378a11.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.150426] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b12022ea-4459-482f-af28-a838c8f9bb3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.164694] env[68569]: DEBUG oslo_vmware.api [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1072.164694] env[68569]: value = "task-3167638" [ 1072.164694] env[68569]: _type = "Task" [ 1072.164694] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.172331] env[68569]: DEBUG oslo_vmware.api [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.177707] env[68569]: DEBUG nova.network.neutron [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1072.215840] env[68569]: DEBUG nova.network.neutron [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Successfully created port: fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1072.291227] env[68569]: DEBUG nova.network.neutron [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.343138] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1072.348819] env[68569]: INFO nova.compute.resource_tracker [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Updating resource usage from migration fadc6c12-6187-4e3b-b55a-610a9dbc0520 [ 1072.403323] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.403738] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.565076] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167637, 'name': ReconfigVM_Task, 'duration_secs': 0.336278} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.565497] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 24bcffcc-6da1-4ae5-b802-88e9364eaf0e/24bcffcc-6da1-4ae5-b802-88e9364eaf0e.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.565968] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a109a61-717e-4284-b7fe-f005abd55762 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.575764] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1072.575764] env[68569]: value = "task-3167639" [ 1072.575764] env[68569]: _type = "Task" [ 1072.575764] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.584334] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52425030-01f7-b65e-26b8-dd2c0c0e9ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.009344} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.585055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.585313] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1072.585562] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.585751] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.585901] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.589075] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8484fa16-797d-4444-8c6f-44a1acd14ded {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.590710] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167639, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.597255] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.597574] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1072.598622] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a2bc985-e91d-4440-94bd-cd93ba66fe5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.603722] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1072.603722] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52225367-1172-ba50-ec9f-03cb17430f6e" [ 1072.603722] env[68569]: _type = "Task" [ 1072.603722] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.611719] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52225367-1172-ba50-ec9f-03cb17430f6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.674065] env[68569]: DEBUG oslo_vmware.api [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167638, 'name': ReconfigVM_Task, 'duration_secs': 0.138314} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.676797] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1072.689099] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e3a1f3-9e00-4f88-bba6-d4bcc699d693 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.696646] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f6f45e-4a90-41dc-bea6-78b7aed3ebf1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.728270] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539fc99c-5f6d-4f00-98bc-96acebf72917 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.735398] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-63bbc415-85d4-4876-b95f-3b62e4cd1dd3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.749224] env[68569]: DEBUG nova.compute.provider_tree [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.794540] env[68569]: DEBUG oslo_concurrency.lockutils [req-b25879c6-0a84-4f81-a34c-1a9d2728234e req-899a8838-01df-43ec-a6ab-dd4992c9b1c9 service nova] Releasing lock "refresh_cache-5eb7ff96-3a9e-470d-9cbe-644446302ecf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.794941] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "refresh_cache-5eb7ff96-3a9e-470d-9cbe-644446302ecf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.795198] env[68569]: DEBUG nova.network.neutron [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1072.910750] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.910959] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.911168] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.911359] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.911526] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.911931] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.911931] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1072.911931] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1073.086164] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167639, 'name': Rename_Task, 'duration_secs': 0.146407} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.086444] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.086678] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-684da5d7-4f7b-420d-836a-6a99e4add5ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.094025] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1073.094025] env[68569]: value = "task-3167640" [ 1073.094025] env[68569]: _type = "Task" [ 1073.094025] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.102367] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.112013] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52225367-1172-ba50-ec9f-03cb17430f6e, 'name': SearchDatastore_Task, 'duration_secs': 0.008752} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.112756] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bec1d8d-7f80-41e2-92e0-c4d906a1eb19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.117735] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1073.117735] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525fc3b8-df47-9e12-a8b7-1e7956370d83" [ 1073.117735] env[68569]: _type = "Task" [ 1073.117735] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.124987] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525fc3b8-df47-9e12-a8b7-1e7956370d83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.251996] env[68569]: DEBUG nova.scheduler.client.report [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.325287] env[68569]: DEBUG nova.network.neutron [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1073.359581] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1073.388151] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1073.388409] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1073.388565] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1073.388994] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1073.388994] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1073.389176] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1073.389339] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1073.389412] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1073.389575] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1073.389732] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1073.389926] env[68569]: DEBUG nova.virt.hardware [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1073.390781] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585c1adb-7b83-49cd-b2ab-0173b416ca63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.400788] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d47750-9456-4928-aab6-dde76d481f85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.415955] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.459663] env[68569]: DEBUG nova.network.neutron [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Updating instance_info_cache with network_info: [{"id": "bb074230-39bf-4287-9375-e7e6b237a383", "address": "fa:16:3e:ea:d3:c6", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb074230-39", "ovs_interfaceid": "bb074230-39bf-4287-9375-e7e6b237a383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.604028] env[68569]: DEBUG oslo_vmware.api [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167640, 'name': PowerOnVM_Task, 'duration_secs': 
0.413438} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.604763] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.605032] env[68569]: INFO nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Took 6.70 seconds to spawn the instance on the hypervisor. [ 1073.605273] env[68569]: DEBUG nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.606123] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343a8725-8d2e-4bfd-8518-da8321d27cd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.627020] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525fc3b8-df47-9e12-a8b7-1e7956370d83, 'name': SearchDatastore_Task, 'duration_secs': 0.009411} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.627334] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.627621] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1073.628254] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9700af37-64a4-4257-8a16-691e7f903ca0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.634355] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1073.634355] env[68569]: value = "task-3167641" [ 1073.634355] env[68569]: _type = "Task" [ 1073.634355] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.643118] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.682352] env[68569]: DEBUG nova.compute.manager [req-cc825e09-abc4-48c0-af88-681ca7fc6d53 req-893db546-41bf-40f7-a9dc-5180c305dd76 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Received event network-vif-plugged-fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1073.682570] env[68569]: DEBUG oslo_concurrency.lockutils [req-cc825e09-abc4-48c0-af88-681ca7fc6d53 req-893db546-41bf-40f7-a9dc-5180c305dd76 service nova] Acquiring lock "7c71799e-21d9-41f8-b35c-7117354a0287-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.682777] env[68569]: DEBUG oslo_concurrency.lockutils [req-cc825e09-abc4-48c0-af88-681ca7fc6d53 req-893db546-41bf-40f7-a9dc-5180c305dd76 service nova] Lock "7c71799e-21d9-41f8-b35c-7117354a0287-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.682935] env[68569]: DEBUG oslo_concurrency.lockutils [req-cc825e09-abc4-48c0-af88-681ca7fc6d53 req-893db546-41bf-40f7-a9dc-5180c305dd76 service nova] Lock "7c71799e-21d9-41f8-b35c-7117354a0287-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.683204] env[68569]: DEBUG nova.compute.manager [req-cc825e09-abc4-48c0-af88-681ca7fc6d53 req-893db546-41bf-40f7-a9dc-5180c305dd76 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] No waiting events found dispatching network-vif-plugged-fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1073.683721] env[68569]: WARNING nova.compute.manager [req-cc825e09-abc4-48c0-af88-681ca7fc6d53 req-893db546-41bf-40f7-a9dc-5180c305dd76 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Received unexpected event network-vif-plugged-fb7d829c-cd12-4906-a87e-6d26d31f5771 for instance with vm_state building and task_state spawning. 
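[editor's note] The "Waiting for the task … progress is N% … completed successfully" records above all come from the same poll-until-terminal-state pattern in the oslo.vmware task layer. The following is a minimal, self-contained sketch of that pattern only; it is not the oslo.vmware implementation, and get_task_info() and TaskFailed are illustrative stand-ins for whatever vSphere client object is actually in use.

import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info is assumed to return a dict with a 'state' key
    ('running', 'success' or 'error'), an optional integer 'progress',
    and an optional 'error' message.
    """
    while True:
        info = get_task_info()
        state = info["state"]
        if state == "running":
            # Mirrors the "progress is N%" lines emitted while polling.
            print(f"progress is {info.get('progress', 0)}%")
        elif state == "success":
            # Mirrors the "completed successfully" lines.
            print("completed successfully")
            return info
        elif state == "error":
            raise TaskFailed(info.get("error", "task failed"))
        time.sleep(poll_interval)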
[ 1073.714410] env[68569]: DEBUG nova.objects.instance [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'flavor' on Instance uuid a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.757595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.436s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.757805] env[68569]: INFO nova.compute.manager [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Migrating [ 1073.765678] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.621s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.765946] env[68569]: DEBUG nova.objects.instance [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lazy-loading 'resources' on Instance uuid 7b10cfb4-dc0a-4311-a24f-7a25869ef594 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.774945] env[68569]: DEBUG nova.network.neutron [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Successfully updated port: fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1073.963159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "refresh_cache-5eb7ff96-3a9e-470d-9cbe-644446302ecf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.963505] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Instance network_info: |[{"id": "bb074230-39bf-4287-9375-e7e6b237a383", "address": "fa:16:3e:ea:d3:c6", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb074230-39", "ovs_interfaceid": "bb074230-39bf-4287-9375-e7e6b237a383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1073.963962] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:d3:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb074230-39bf-4287-9375-e7e6b237a383', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.971709] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.971975] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1073.972317] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9af51ed-1a1b-406d-9958-b2bbc4eb4a2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.993183] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1073.993183] env[68569]: value = "task-3167642" [ 1073.993183] env[68569]: _type = "Task" [ 1073.993183] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.001652] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167642, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.126917] env[68569]: INFO nova.compute.manager [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Took 16.30 seconds to build instance. [ 1074.145028] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473982} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.145187] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1074.145410] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1074.145738] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb69dd8d-5006-4c85-b7a4-4072de1aea39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.156290] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1074.156290] env[68569]: value = "task-3167643" [ 1074.156290] env[68569]: _type = "Task" [ 1074.156290] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.168487] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167643, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.220758] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2a32f364-6c98-4b80-8bb8-72d2239c0a9a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.250s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.275244] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.275417] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquired lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.275585] env[68569]: DEBUG nova.network.neutron [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1074.279287] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-7c71799e-21d9-41f8-b35c-7117354a0287" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.279287] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-7c71799e-21d9-41f8-b35c-7117354a0287" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.279426] env[68569]: DEBUG nova.network.neutron [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1074.503146] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167642, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.504742] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece95b15-2a53-4f63-b57b-b48631dad282 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.511550] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8cfa70-93f5-4fd7-922d-64b1148dcf59 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.542576] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1409794a-3103-4cf0-bb56-4673558743ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.551113] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f3ee41-f684-438a-9cd2-18ea51c06011 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.565928] env[68569]: DEBUG nova.compute.provider_tree [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1074.629782] env[68569]: DEBUG oslo_concurrency.lockutils [None req-07099a00-d205-4286-940b-3b4ada4e19b3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.821s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.657298] env[68569]: INFO nova.compute.manager [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Rebuilding instance [ 1074.675230] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167643, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068786} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.676270] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1074.677115] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89d7b62-14eb-4e70-acf3-cd9e32661b6f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.705556] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.710109] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aebee85b-2369-438b-9241-b2398ad9b5ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.730185] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1074.730185] env[68569]: value = "task-3167644" [ 1074.730185] env[68569]: _type = "Task" [ 1074.730185] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.738904] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167644, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.743874] env[68569]: DEBUG nova.compute.manager [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.744698] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb88b90-5473-40f5-8f39-c7d95be77ae8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.840401] env[68569]: DEBUG nova.network.neutron [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1075.002995] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167642, 'name': CreateVM_Task, 'duration_secs': 0.693875} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.003173] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1075.003833] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.003997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.004420] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1075.004671] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53856ce3-5481-47f7-82f3-bbe34e1106e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.009843] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1075.009843] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52273a9d-d2d8-dc3f-3ac8-ed1859f687c1" [ 1075.009843] env[68569]: _type = "Task" [ 1075.009843] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.017132] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52273a9d-d2d8-dc3f-3ac8-ed1859f687c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.087557] env[68569]: ERROR nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] [req-585ede58-2317-49c3-8a45-f78f1bfcafd9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-585ede58-2317-49c3-8a45-f78f1bfcafd9"}]} [ 1075.103895] env[68569]: DEBUG nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1075.123369] env[68569]: DEBUG nova.network.neutron [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Updating instance_info_cache with network_info: [{"id": "fb7d829c-cd12-4906-a87e-6d26d31f5771", "address": "fa:16:3e:aa:af:c2", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb7d829c-cd", "ovs_interfaceid": "fb7d829c-cd12-4906-a87e-6d26d31f5771", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.129065] env[68569]: DEBUG nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1075.129406] env[68569]: DEBUG nova.compute.provider_tree [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1075.147219] env[68569]: DEBUG nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1075.173152] env[68569]: DEBUG nova.network.neutron [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Updating instance_info_cache with network_info: [{"id": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "address": "fa:16:3e:69:64:5d", "network": {"id": "d8204444-45eb-4fa2-93d4-abc5da746881", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1840626634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "decd2576711b41bbb25300d9db62643e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9d20c97-bf", "ovs_interfaceid": "e9d20c97-bf70-4f30-8424-5db6b70e5677", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.176344] env[68569]: DEBUG nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1075.240795] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167644, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.395165] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64d254d-d4a0-4fbc-b370-a706d1f17e1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.402936] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a1b17e-0998-4739-9eb8-3bb0a0acd329 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.434172] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b801a2-b223-4ecd-8c2c-20b5a612f807 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.442062] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc23c2c-93e5-4ac7-8cc3-d1f209f344ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.455348] env[68569]: DEBUG nova.compute.provider_tree [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1075.520243] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.520562] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Processing image 1146c72a-4d3b-4831-9e73-15f957a6f7be {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1075.521023] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.521098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1075.521360] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1075.521619] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e2365ee-3b64-4053-8577-0e0fdde11faa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.532279] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1075.532459] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1075.533198] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90ce3734-0d4a-4fb4-b6fb-353c49440c00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.538834] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1075.538834] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52417396-465d-da00-9654-08b114dc3481" [ 1075.538834] env[68569]: _type = "Task" [ 1075.538834] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.546253] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52417396-465d-da00-9654-08b114dc3481, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.626507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-7c71799e-21d9-41f8-b35c-7117354a0287" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.626863] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Instance network_info: |[{"id": "fb7d829c-cd12-4906-a87e-6d26d31f5771", "address": "fa:16:3e:aa:af:c2", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb7d829c-cd", "ovs_interfaceid": "fb7d829c-cd12-4906-a87e-6d26d31f5771", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1075.627274] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:af:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb7d829c-cd12-4906-a87e-6d26d31f5771', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1075.634880] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.635085] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1075.635306] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01842a91-0489-4acb-af61-885bcf00a335 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.655259] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1075.655259] env[68569]: value = "task-3167645" [ 1075.655259] env[68569]: _type = "Task" [ 1075.655259] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.662691] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167645, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.680286] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Releasing lock "refresh_cache-d7a0631c-902d-4653-b900-2123de5bcb44" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.717246] env[68569]: DEBUG nova.compute.manager [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Received event network-changed-fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1075.717440] env[68569]: DEBUG nova.compute.manager [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Refreshing instance network info cache due to event network-changed-fb7d829c-cd12-4906-a87e-6d26d31f5771. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1075.717549] env[68569]: DEBUG oslo_concurrency.lockutils [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] Acquiring lock "refresh_cache-7c71799e-21d9-41f8-b35c-7117354a0287" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.717683] env[68569]: DEBUG oslo_concurrency.lockutils [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] Acquired lock "refresh_cache-7c71799e-21d9-41f8-b35c-7117354a0287" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.717832] env[68569]: DEBUG nova.network.neutron [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Refreshing network info cache for port fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1075.739803] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167644, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.759977] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.760542] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c826dba1-d7a5-414b-9b39-4d5fb701c6cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.766864] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1075.766864] env[68569]: value = "task-3167646" [ 1075.766864] env[68569]: _type = "Task" [ 1075.766864] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.774541] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.989591] env[68569]: DEBUG nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 151 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1075.989919] env[68569]: DEBUG nova.compute.provider_tree [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 151 to 152 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1075.990150] env[68569]: DEBUG nova.compute.provider_tree [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.050320] env[68569]: DEBUG nova.virt.vmwareapi.vmops 
[None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1076.050628] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Fetch image to [datastore1] OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b/OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1076.050835] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Downloading stream optimized image 1146c72a-4d3b-4831-9e73-15f957a6f7be to [datastore1] OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b/OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b.vmdk on the data store datastore1 as vApp {{(pid=68569) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1076.051044] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Downloading image file data 1146c72a-4d3b-4831-9e73-15f957a6f7be to the ESX as VM named 'OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b' {{(pid=68569) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1076.132270] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1076.132270] env[68569]: value = "resgroup-9" [ 1076.132270] env[68569]: _type = "ResourcePool" [ 1076.132270] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1076.132661] env[68569]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e177bf4e-4f58-40a2-b035-6e648889ece1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.152992] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease: (returnval){ [ 1076.152992] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b394-3122-b456-e3b0-856113688988" [ 1076.152992] env[68569]: _type = "HttpNfcLease" [ 1076.152992] env[68569]: } obtained for vApp import into resource pool (val){ [ 1076.152992] env[68569]: value = "resgroup-9" [ 1076.152992] env[68569]: _type = "ResourcePool" [ 1076.152992] env[68569]: }. 
{{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1076.153361] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the lease: (returnval){ [ 1076.153361] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b394-3122-b456-e3b0-856113688988" [ 1076.153361] env[68569]: _type = "HttpNfcLease" [ 1076.153361] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1076.161963] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1076.161963] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b394-3122-b456-e3b0-856113688988" [ 1076.161963] env[68569]: _type = "HttpNfcLease" [ 1076.161963] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1076.164872] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167645, 'name': CreateVM_Task, 'duration_secs': 0.403017} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.165039] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1076.165691] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.165852] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.166184] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1076.166418] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5acdf574-325e-4d73-9c3f-f47b2e94103c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.171840] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1076.171840] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd9623-1b1c-5a66-a0e3-c5fb4dcef37f" [ 1076.171840] env[68569]: _type = "Task" [ 1076.171840] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.180870] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd9623-1b1c-5a66-a0e3-c5fb4dcef37f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.241171] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167644, 'name': ReconfigVM_Task, 'duration_secs': 1.018061} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.241520] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Reconfigured VM instance instance-0000005b to attach disk [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849/a7145443-aacb-4d9e-8e39-3741d0630849.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.242191] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bf2368e-6d6a-4288-abad-9965c75ba520 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.248648] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1076.248648] env[68569]: value = "task-3167648" [ 1076.248648] env[68569]: _type = "Task" [ 1076.248648] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.257224] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167648, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.276210] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167646, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.495698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.730s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.500548] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.636s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.500548] env[68569]: INFO nova.compute.claims [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.514674] env[68569]: DEBUG nova.network.neutron [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Updated VIF entry in instance network info cache for port fb7d829c-cd12-4906-a87e-6d26d31f5771. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1076.515055] env[68569]: DEBUG nova.network.neutron [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Updating instance_info_cache with network_info: [{"id": "fb7d829c-cd12-4906-a87e-6d26d31f5771", "address": "fa:16:3e:aa:af:c2", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb7d829c-cd", "ovs_interfaceid": "fb7d829c-cd12-4906-a87e-6d26d31f5771", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.517885] env[68569]: INFO nova.scheduler.client.report [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Deleted allocations for instance 7b10cfb4-dc0a-4311-a24f-7a25869ef594 [ 1076.661420] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1076.661420] env[68569]: value = 
"session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b394-3122-b456-e3b0-856113688988" [ 1076.661420] env[68569]: _type = "HttpNfcLease" [ 1076.661420] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1076.661667] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1076.661667] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282b394-3122-b456-e3b0-856113688988" [ 1076.661667] env[68569]: _type = "HttpNfcLease" [ 1076.661667] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1076.662384] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8592ee80-d182-4385-a83c-2a7792a9cf3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.669409] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e0fc5-ee24-b270-fcf5-b967ac76708d/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1076.669576] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e0fc5-ee24-b270-fcf5-b967ac76708d/disk-0.vmdk. {{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1076.738053] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd9623-1b1c-5a66-a0e3-c5fb4dcef37f, 'name': SearchDatastore_Task, 'duration_secs': 0.020059} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.741415] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.741608] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.742208] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.742208] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.742461] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.744327] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef713c99-0c54-43b9-8a3a-87d5113f386c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.746358] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-057860b8-cf9b-4dee-966b-4c844363d3c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.755435] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.755654] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1076.759477] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bb6e8a4-7570-4af6-a765-a7605bae1418 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.761735] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167648, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.764957] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1076.764957] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ad1d7b-30e6-3cd2-fe7f-5d663d1f949e" [ 1076.764957] env[68569]: _type = "Task" [ 1076.764957] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.778655] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ad1d7b-30e6-3cd2-fe7f-5d663d1f949e, 'name': SearchDatastore_Task, 'duration_secs': 0.010133} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.782496] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.782763] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f38e7fa-7c7a-4203-86e3-b5b8bcba4e1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.787630] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1076.787630] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524653a5-c780-834f-8a0a-d9a97962dcd5" [ 1076.787630] env[68569]: _type = "Task" [ 1076.787630] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.796023] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524653a5-c780-834f-8a0a-d9a97962dcd5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.018063] env[68569]: DEBUG oslo_concurrency.lockutils [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] Releasing lock "refresh_cache-7c71799e-21d9-41f8-b35c-7117354a0287" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.018406] env[68569]: DEBUG nova.compute.manager [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Received event network-changed-9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1077.018638] env[68569]: DEBUG nova.compute.manager [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Refreshing instance network info cache due to event network-changed-9808ed24-af29-4716-93e9-049a46b044c6. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1077.018958] env[68569]: DEBUG oslo_concurrency.lockutils [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] Acquiring lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.019221] env[68569]: DEBUG oslo_concurrency.lockutils [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] Acquired lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.019460] env[68569]: DEBUG nova.network.neutron [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Refreshing network info cache for port 9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1077.025894] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a5fe781c-6632-47a9-9fb7-0abce231fc2b tempest-ServerShowV247Test-2077883231 tempest-ServerShowV247Test-2077883231-project-member] Lock "7b10cfb4-dc0a-4311-a24f-7a25869ef594" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.063s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.261956] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167648, 'name': Rename_Task, 'duration_secs': 0.579687} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.263608] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.264155] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59e28d2a-799a-45ff-bbd8-6a18ec5fb166 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.271238] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1077.271238] env[68569]: value = "task-3167649" [ 1077.271238] env[68569]: _type = "Task" [ 1077.271238] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.290120] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167646, 'name': PowerOffVM_Task, 'duration_secs': 1.173078} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.291089] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167649, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.292848] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.304310] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524653a5-c780-834f-8a0a-d9a97962dcd5, 'name': SearchDatastore_Task, 'duration_secs': 0.008781} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.307394] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.307762] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7c71799e-21d9-41f8-b35c-7117354a0287/7c71799e-21d9-41f8-b35c-7117354a0287.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1077.310523] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4102c2f2-8fb1-4a8f-a3cc-5ce349e86e1a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.318737] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1077.318737] env[68569]: value = "task-3167650" [ 1077.318737] env[68569]: _type = "Task" [ 1077.318737] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.327881] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.372568] env[68569]: INFO nova.compute.manager [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Detaching volume c8148d10-8bad-49bc-93bf-559f83378a11 [ 1077.412044] env[68569]: INFO nova.virt.block_device [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Attempting to driver detach volume c8148d10-8bad-49bc-93bf-559f83378a11 from mountpoint /dev/sdb [ 1077.412306] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1077.412488] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1077.413414] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d642aa82-6d64-4986-8392-86578c52c8dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.435053] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecdc63e-55cc-40ac-87bd-7794120947d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.441919] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073bdbbb-89ab-4f4c-8dc7-00347004f64c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.463794] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137b2965-5cf0-484c-b1d9-9c373d08aee1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.479478] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] The volume has not been displaced from its original location: [datastore1] volume-c8148d10-8bad-49bc-93bf-559f83378a11/volume-c8148d10-8bad-49bc-93bf-559f83378a11.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1077.485133] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1077.488882] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Completed reading data from the image iterator. 
{{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1077.489136] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e0fc5-ee24-b270-fcf5-b967ac76708d/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1077.489395] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63d03995-b2fa-4300-90de-cd46340692c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.503663] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665f8dea-85d0-400b-af7b-c42b74575803 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.512922] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e0fc5-ee24-b270-fcf5-b967ac76708d/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1077.513128] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e0fc5-ee24-b270-fcf5-b967ac76708d/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1077.514925] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e6dac708-a1e7-45e5-b433-a2e0db6cc521 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.516627] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1077.516627] env[68569]: value = "task-3167651" [ 1077.516627] env[68569]: _type = "Task" [ 1077.516627] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.525427] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167651, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.795032] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167649, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.810990] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061c4f14-0382-475e-89b6-28add4bdded3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.825944] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0f29bd-b7a8-4912-b4b3-5b29bec797c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.838701] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167650, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.866858] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70552e9-0af8-415c-9390-0e8c946a80b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.877828] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77953114-6110-4f18-97cc-4b4138f9c89b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.884871] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526e0fc5-ee24-b270-fcf5-b967ac76708d/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1077.885096] env[68569]: INFO nova.virt.vmwareapi.images [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Downloaded image file data 1146c72a-4d3b-4831-9e73-15f957a6f7be [ 1077.886171] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58fdc99-8f92-497d-b253-8ae912a3efa6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.896375] env[68569]: DEBUG nova.compute.provider_tree [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.911044] env[68569]: DEBUG nova.network.neutron [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updated VIF entry in instance network info cache for port 9808ed24-af29-4716-93e9-049a46b044c6. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1077.911417] env[68569]: DEBUG nova.network.neutron [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updating instance_info_cache with network_info: [{"id": "9808ed24-af29-4716-93e9-049a46b044c6", "address": "fa:16:3e:f1:31:7d", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9808ed24-af", "ovs_interfaceid": "9808ed24-af29-4716-93e9-049a46b044c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.912996] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b70998f-9ea8-490b-b48d-f231559af1a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.953875] env[68569]: INFO nova.virt.vmwareapi.images [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] The imported VM was unregistered [ 1077.956641] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1077.957023] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating directory with path [datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1077.957637] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9650124a-a965-4ff4-8ea9-8bb0e35bc8fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.977752] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Created directory with path [datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be {{(pid=68569) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1077.978033] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b/OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b.vmdk to [datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk. {{(pid=68569) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1077.978243] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-51bd43bf-8646-444f-8e7d-5cddc57b2305 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.985062] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1077.985062] env[68569]: value = "task-3167653" [ 1077.985062] env[68569]: _type = "Task" [ 1077.985062] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.992942] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167653, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.025878] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167651, 'name': ReconfigVM_Task, 'duration_secs': 0.330646} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.026175] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1078.030795] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fc05776-cddb-4971-a381-a4eb64666009 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.045449] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1078.045449] env[68569]: value = "task-3167654" [ 1078.045449] env[68569]: _type = "Task" [ 1078.045449] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.055182] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167654, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.244771] env[68569]: ERROR nova.compute.manager [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Traceback (most recent call last): [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] yield [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] disk_info = self.driver.migrate_disk_and_power_off( [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1078.244771] env[68569]: ERROR nova.compute.manager [instance: d7a0631c-902d-4653-b900-2123de5bcb44] [ 1078.290487] env[68569]: DEBUG oslo_vmware.api [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167649, 'name': PowerOnVM_Task, 'duration_secs': 0.608328} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.290997] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1078.291257] env[68569]: DEBUG nova.compute.manager [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.292481] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73924939-dae0-44f5-bf21-79dee84f711d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.331967] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530136} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.331967] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 7c71799e-21d9-41f8-b35c-7117354a0287/7c71799e-21d9-41f8-b35c-7117354a0287.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1078.332235] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1078.332345] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1cafc5c-1b4f-4916-9e85-8fafa7797a48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.341805] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1078.341805] env[68569]: value = "task-3167655" [ 1078.341805] env[68569]: _type = "Task" [ 1078.341805] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.354922] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.400663] env[68569]: DEBUG nova.scheduler.client.report [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.416267] env[68569]: DEBUG oslo_concurrency.lockutils [req-d1a162dc-ca67-402a-8fe0-e29945ffe875 req-16eb8a15-03da-4ece-a49d-2c9644d687af service nova] Releasing lock "refresh_cache-24bcffcc-6da1-4ae5-b802-88e9364eaf0e" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.497585] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167653, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.557744] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167654, 'name': ReconfigVM_Task, 'duration_secs': 0.286882} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.558174] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1078.771056] env[68569]: INFO nova.compute.manager [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration fadc6c12-6187-4e3b-b55a-610a9dbc0520 for instance [ 1078.796813] env[68569]: DEBUG nova.scheduler.client.report [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 152}}, 'project_id': 'decd2576711b41bbb25300d9db62643e', 'user_id': '330a73f609f746d8b8c1a7eefe557c69', 'consumer_generation': 1} on consumer d7a0631c-902d-4653-b900-2123de5bcb44 {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1078.813556] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.855742] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072395} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.858152] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1078.859267] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031121ef-88ca-4977-a0ca-5a99d3dc05b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.884642] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 7c71799e-21d9-41f8-b35c-7117354a0287/7c71799e-21d9-41f8-b35c-7117354a0287.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.885365] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a63698c8-2378-4b3e-be87-bb6611c7afbb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.906507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.907090] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1078.911119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.160s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.911373] env[68569]: DEBUG nova.objects.instance [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lazy-loading 'resources' on Instance uuid c0ea0ef8-93c2-416a-8caa-a51f7a39627e {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.912877] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1078.912877] env[68569]: value = "task-3167656" [ 1078.912877] env[68569]: _type = "Task" [ 1078.912877] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.924173] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.997210] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167653, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.414577] env[68569]: DEBUG nova.compute.utils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.419750] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.419750] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.435211] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.480963] env[68569]: DEBUG nova.policy [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69f4475c53cd434cbad10ee97213193d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83427ebdc6364f7887409f7bfc35f025', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1079.498289] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167653, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.610132] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.610723] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-811105a5-9bea-42d0-8efc-3851aac480d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.620089] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1079.620089] env[68569]: value = "task-3167657" [ 1079.620089] env[68569]: _type = "Task" [ 1079.620089] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.634194] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1079.634194] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1079.634194] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1079.636385] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dfca24-7d45-4487-851a-69a53a826ab3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.662727] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07e1816-608e-4c4d-b932-6e5f656353ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.665845] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98790b1-4124-4760-be18-96b8677eefdf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.674498] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72be5ced-6f21-4c5c-a08d-a5279de69970 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.678018] env[68569]: WARNING nova.virt.vmwareapi.driver [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1079.678376] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1079.679754] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac5bd3e-3602-462c-afc8-3f4a3f319dd3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.686061] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1079.711187] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e0408dd-d08c-4a41-aada-7da8d625ce2a 
{{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.714178] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3a0394-0255-488b-ad0c-36caf94f908e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.722779] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8336bc3d-f658-4eb2-8c6d-ce54cc70b11b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.737065] env[68569]: DEBUG nova.compute.provider_tree [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.783741] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1079.784043] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1079.784235] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore2] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.784510] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23f6d5b3-d6c1-4219-b090-6bd3d087b2e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.793087] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1079.793087] env[68569]: value = "task-3167659" [ 1079.793087] env[68569]: _type = "Task" [ 1079.793087] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.804262] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167659, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.805793] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "d7a0631c-902d-4653-b900-2123de5bcb44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.806038] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "d7a0631c-902d-4653-b900-2123de5bcb44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.806290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "d7a0631c-902d-4653-b900-2123de5bcb44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.806480] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "d7a0631c-902d-4653-b900-2123de5bcb44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.807063] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "d7a0631c-902d-4653-b900-2123de5bcb44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.808895] env[68569]: INFO nova.compute.manager [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Terminating instance [ 1079.828754] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Successfully created port: d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.917870] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1079.935803] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167656, 'name': ReconfigVM_Task, 'duration_secs': 0.995823} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.936524] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 7c71799e-21d9-41f8-b35c-7117354a0287/7c71799e-21d9-41f8-b35c-7117354a0287.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.937272] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-602781ef-3659-4ecc-87e3-057c69b8f10f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.946768] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1079.946768] env[68569]: value = "task-3167660" [ 1079.946768] env[68569]: _type = "Task" [ 1079.946768] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.958335] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167660, 'name': Rename_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.000469] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167653, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.142164] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Successfully created port: 970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.240486] env[68569]: DEBUG nova.scheduler.client.report [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.305258] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269623} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.305530] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.305917] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1080.305917] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1080.316348] env[68569]: DEBUG nova.compute.manager [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1080.316573] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.317423] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f55d4ec-a82f-4663-b7b8-4f7e753ef409 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.327484] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1080.327484] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1cf83ce-3a8b-4a2d-b3a2-bc654022f580 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.334642] env[68569]: DEBUG oslo_vmware.api [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1080.334642] env[68569]: value = "task-3167661" [ 1080.334642] env[68569]: _type = "Task" [ 1080.334642] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.341676] env[68569]: DEBUG oslo_vmware.api [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.371894] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.458034] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167660, 'name': Rename_Task, 'duration_secs': 0.365146} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.458034] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1080.458034] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0425f907-3da5-4669-90a6-38d7b67fbeef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.464473] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1080.464473] env[68569]: value = "task-3167662" [ 1080.464473] env[68569]: _type = "Task" [ 1080.464473] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.479238] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167662, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.499386] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167653, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.405029} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.499816] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b/OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b.vmdk to [datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk. 
[ 1080.500021] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Cleaning up location [datastore1] OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1080.500221] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_4564c366-8636-472d-a117-7cfcc28bdb7b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.500547] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02865672-6308-4643-9589-a06cb02c1c7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.507903] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1080.507903] env[68569]: value = "task-3167663" [ 1080.507903] env[68569]: _type = "Task" [ 1080.507903] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.516500] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167663, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.745740] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.835s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.748167] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.239s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.749668] env[68569]: INFO nova.compute.claims [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1080.772413] env[68569]: INFO nova.scheduler.client.report [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Deleted allocations for instance c0ea0ef8-93c2-416a-8caa-a51f7a39627e [ 1080.811317] env[68569]: INFO nova.virt.block_device [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 
a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Booting with volume c8148d10-8bad-49bc-93bf-559f83378a11 at /dev/sdb [ 1080.846639] env[68569]: DEBUG oslo_vmware.api [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167661, 'name': PowerOffVM_Task, 'duration_secs': 0.218931} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.847985] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1080.848271] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1080.848572] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f17c5811-7b4e-4df3-b9c2-7d1595dd8285 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.850719] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f396829b-4827-4ca3-9dfc-2293a16435c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.862572] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ae866a-68c6-42ba-955a-0d212c76afd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.898813] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae89b9c4-c629-44c6-b50b-cc9fcb99cab6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.907405] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86abeaff-fb98-49fb-bf26-21d235c9c667 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.928394] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1080.928878] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1080.929234] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleting the datastore file [datastore1] 
d7a0631c-902d-4653-b900-2123de5bcb44 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.930058] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1e8ee6b-94f6-46dc-81f5-a3c9e79aba68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.933642] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1080.948587] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230bfb5d-9fb0-46b2-8441-215f4179d06d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.951924] env[68569]: DEBUG oslo_vmware.api [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for the task: (returnval){ [ 1080.951924] env[68569]: value = "task-3167666" [ 1080.951924] env[68569]: _type = "Task" [ 1080.951924] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.961905] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4beccdae-00e7-4052-923f-c6c3915d4495 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.967057] env[68569]: DEBUG oslo_vmware.api [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167666, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.969147] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.969404] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.969561] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.969749] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.969980] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.970194] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.970424] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.970600] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.970776] env[68569]: DEBUG nova.virt.hardware [None 
req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.970939] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1080.971136] env[68569]: DEBUG nova.virt.hardware [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.974979] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf18525-72d1-4453-91f5-8978c6356747 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.982870] env[68569]: DEBUG nova.virt.block_device [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating existing volume attachment record: 4da6c4b6-5d3d-4a70-aa10-f8f09f388330 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1080.992067] env[68569]: DEBUG oslo_vmware.api [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167662, 'name': PowerOnVM_Task, 'duration_secs': 0.494846} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.992538] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1080.992890] env[68569]: INFO nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Took 7.63 seconds to spawn the instance on the hypervisor. 
[ 1080.993223] env[68569]: DEBUG nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.995145] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02984c05-4708-4f10-ab9b-83438731ee27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.001873] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa47cae-0140-46bb-a9c5-43c870f97b9f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.036692] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058399} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.036980] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.037327] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.037411] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk to [datastore1] 5eb7ff96-3a9e-470d-9cbe-644446302ecf/5eb7ff96-3a9e-470d-9cbe-644446302ecf.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1081.037680] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-738fca33-0769-4507-ae68-b71d427866c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.045794] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1081.045794] env[68569]: value = "task-3167667" [ 1081.045794] env[68569]: _type = "Task" [ 1081.045794] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.054703] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167667, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.279286] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a87d25d9-34df-4f65-836c-010951cf8463 tempest-ServersNegativeTestJSON-1790858041 tempest-ServersNegativeTestJSON-1790858041-project-member] Lock "c0ea0ef8-93c2-416a-8caa-a51f7a39627e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.978s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.463116] env[68569]: DEBUG oslo_vmware.api [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Task: {'id': task-3167666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151325} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.463503] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.463634] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1081.463792] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1081.464496] env[68569]: INFO nova.compute.manager [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1081.464496] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.464496] env[68569]: DEBUG nova.compute.manager [-] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1081.464802] env[68569]: DEBUG nova.network.neutron [-] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1081.546092] env[68569]: INFO nova.compute.manager [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Took 19.19 seconds to build instance. 
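The recurring "Waiting for the task ... progress is N%" and "completed successfully" lines around task-3167667 reflect a poll-until-done loop against the vCenter task state. The sketch below is a minimal illustration of that pattern under stated assumptions, not the oslo.vmware implementation; get_task_info and the dict-shaped task info it returns are hypothetical.

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll a vCenter-style task until it reports success or error; the caller
    # supplies get_task_info(), which returns the current task state/progress.
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # Still queued/running: report progress and retry, which is what the
        # repeated "CopyVirtualDisk_Task progress is N%" entries correspond to.
        print("progress is {}%".format(info.get("progress", 0)))
        time.sleep(poll_interval)

# Hypothetical usage with a canned sequence of task states:
states = iter([{"state": "running", "progress": 21},
               {"state": "running", "progress": 63},
               {"state": "success", "result": "copy done"}])
print(wait_for_task(lambda: next(states), poll_interval=0))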
[ 1081.557786] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167667, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.958234] env[68569]: DEBUG nova.compute.manager [req-6d33e77d-2705-4ee4-9ffd-09e537526191 req-7b83b6a2-ea20-4a4c-a00a-69b17cca0024 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received event network-vif-plugged-d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1081.958494] env[68569]: DEBUG oslo_concurrency.lockutils [req-6d33e77d-2705-4ee4-9ffd-09e537526191 req-7b83b6a2-ea20-4a4c-a00a-69b17cca0024 service nova] Acquiring lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.958770] env[68569]: DEBUG oslo_concurrency.lockutils [req-6d33e77d-2705-4ee4-9ffd-09e537526191 req-7b83b6a2-ea20-4a4c-a00a-69b17cca0024 service nova] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.958970] env[68569]: DEBUG oslo_concurrency.lockutils [req-6d33e77d-2705-4ee4-9ffd-09e537526191 req-7b83b6a2-ea20-4a4c-a00a-69b17cca0024 service nova] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.959820] env[68569]: DEBUG nova.compute.manager [req-6d33e77d-2705-4ee4-9ffd-09e537526191 req-7b83b6a2-ea20-4a4c-a00a-69b17cca0024 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] No waiting events found dispatching network-vif-plugged-d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.960113] env[68569]: WARNING nova.compute.manager [req-6d33e77d-2705-4ee4-9ffd-09e537526191 req-7b83b6a2-ea20-4a4c-a00a-69b17cca0024 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received unexpected event network-vif-plugged-d3de7a89-6fef-43c5-8fe1-1f6e07a715ee for instance with vm_state building and task_state spawning. 
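The lock triplets above ("Acquiring lock ... / acquired ... waited 0.000s / "released" ... held 0.000s" on the per-instance "-events" lock) come from instrumented locking around event dispatch. As an illustrative sketch only, timed_lock below is a hypothetical helper that mimics what those log lines report; it is not oslo_concurrency's code.

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, caller):
    # Serialize callers on a named lock and report how long they waited for it
    # and how long they held it, mirroring the lockutils log lines above.
    lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "{}" by "{}"'.format(name, caller))
    start = time.monotonic()
    with lock:
        print('Lock "{}" acquired by "{}" :: waited {:.3f}s'.format(
            name, caller, time.monotonic() - start))
        held_start = time.monotonic()
        try:
            yield
        finally:
            print('Lock "{}" "released" by "{}" :: held {:.3f}s'.format(
                name, caller, time.monotonic() - held_start))

# Example: guarding per-instance event handling the way the
# "fd34691f-...-events" lock is used above.
with timed_lock("fd34691f-ebe5-4b40-994c-6278e09fc9eb-events", "_pop_event"):
    pass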
[ 1081.991624] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc09d8d7-1d78-4dbb-ae26-f12d9cddc24c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.001647] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e5a915-231f-405f-be6e-1397f0a763d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.041854] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad56678-16c1-4073-a1fb-48226148442a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.053368] env[68569]: DEBUG oslo_concurrency.lockutils [None req-212d7139-2f79-4f1f-9296-eb9966150ae1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7c71799e-21d9-41f8-b35c-7117354a0287" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.704s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.059550] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b274c8f4-bbd4-44d2-8e84-6c789fc93300 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.070379] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167667, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.082054] env[68569]: DEBUG nova.compute.provider_tree [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.115274] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Successfully updated port: d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.564510] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167667, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.564825] env[68569]: DEBUG nova.network.neutron [-] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.583964] env[68569]: DEBUG nova.scheduler.client.report [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.856097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "7c71799e-21d9-41f8-b35c-7117354a0287" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.856097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7c71799e-21d9-41f8-b35c-7117354a0287" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.856097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "7c71799e-21d9-41f8-b35c-7117354a0287-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.856097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7c71799e-21d9-41f8-b35c-7117354a0287-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.856097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7c71799e-21d9-41f8-b35c-7117354a0287-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.858529] env[68569]: INFO nova.compute.manager [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 
7c71799e-21d9-41f8-b35c-7117354a0287] Terminating instance [ 1082.935675] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.935965] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.070665] env[68569]: INFO nova.compute.manager [-] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Took 1.61 seconds to deallocate network for instance. [ 1083.071572] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167667, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.090685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.090685] env[68569]: DEBUG nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1083.099814] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.684s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.100042] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.100204] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1083.100531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.288s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.100696] env[68569]: DEBUG nova.objects.instance [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1083.108294] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8cc2ab-43c7-4b82-91c7-1dfcba5a6492 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.123432] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bb3c23-0eb2-4962-8c39-3691e6db4966 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.144331] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7922f145-dfe6-426c-afa3-b317ecfb3d36 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.159834] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.160116] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.160275] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.160453] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.160597] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.160747] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.160972] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.161151] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.161319] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.161523] env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.161699] 
env[68569]: DEBUG nova.virt.hardware [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.162844] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d5e463-3e64-432f-a6c8-4a2183c58e69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.168416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9570eb4-fe11-42b8-b3bd-d925e5f34609 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.177793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151c5f9b-95d6-4bfb-8dab-77dddad72dfd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.207460] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178989MB free_disk=128GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1083.207705] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.223022] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:d0:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0211609-4c97-40c7-ba53-6f3802350533', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.228635] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.229305] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.229547] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24934c77-eecd-48bc-8f83-3cf39adad951 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.249748] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.249748] env[68569]: value = "task-3167668" [ 1083.249748] env[68569]: _type = "Task" [ 1083.249748] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.258504] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167668, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.365806] env[68569]: DEBUG nova.compute.manager [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1083.366076] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1083.367099] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d79c888-cbfe-4eb5-8669-12d57b5f7e71 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.374784] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.375082] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e8078f1-daa2-4419-8f10-7a81c1f31d37 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.381884] env[68569]: DEBUG oslo_vmware.api [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1083.381884] env[68569]: value = "task-3167669" [ 1083.381884] env[68569]: _type = "Task" [ 1083.381884] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.390088] env[68569]: DEBUG oslo_vmware.api [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167669, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.446409] env[68569]: INFO nova.compute.manager [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Detaching volume 90a6a766-5e36-48f2-84f2-a3b3be1783a7 [ 1083.500577] env[68569]: INFO nova.virt.block_device [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Attempting to driver detach volume 90a6a766-5e36-48f2-84f2-a3b3be1783a7 from mountpoint /dev/sdb [ 1083.500844] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1083.501065] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633684', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'name': 'volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948', 'attached_at': '', 'detached_at': '', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'serial': '90a6a766-5e36-48f2-84f2-a3b3be1783a7'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1083.501995] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d74480-925d-43f7-addb-5776eaae2b83 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.525597] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1abb6c-c2d0-4620-959a-c98934d0d10a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.535631] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f4c9fe-555b-4ccc-94ff-0cb5bb3d7be8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.565734] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6de839d-308e-412e-9d59-60626fcc32e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.574422] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167667, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.39267} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.585199] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1146c72a-4d3b-4831-9e73-15f957a6f7be/1146c72a-4d3b-4831-9e73-15f957a6f7be.vmdk to [datastore1] 5eb7ff96-3a9e-470d-9cbe-644446302ecf/5eb7ff96-3a9e-470d-9cbe-644446302ecf.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.586409] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.586776] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] The volume has not been displaced from its original location: [datastore2] volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7/volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1083.592303] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1083.593217] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60b3820-19e3-4611-9878-88731c3be986 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.595841] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-984d1367-3e81-483c-9475-311865b26cc8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.613099] env[68569]: DEBUG nova.compute.utils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.618787] env[68569]: DEBUG nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1083.619049] env[68569]: DEBUG nova.network.neutron [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1083.640349] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 5eb7ff96-3a9e-470d-9cbe-644446302ecf/5eb7ff96-3a9e-470d-9cbe-644446302ecf.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.643657] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5417190-9099-463b-99e7-7b9d2f131fc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.660043] env[68569]: DEBUG oslo_vmware.api [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1083.660043] env[68569]: value = "task-3167670" [ 1083.660043] env[68569]: _type = "Task" [ 1083.660043] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.665297] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1083.665297] env[68569]: value = "task-3167671" [ 1083.665297] env[68569]: _type = "Task" [ 1083.665297] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.669191] env[68569]: DEBUG oslo_vmware.api [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167670, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.678023] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167671, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.699243] env[68569]: DEBUG nova.policy [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1083.759320] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167668, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.891687] env[68569]: DEBUG oslo_vmware.api [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167669, 'name': PowerOffVM_Task, 'duration_secs': 0.21956} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.891951] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1083.892133] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1083.892380] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7616d834-6f05-48b6-b688-2dd872ebdc2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.952377] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1083.952605] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1083.952774] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore1] 7c71799e-21d9-41f8-b35c-7117354a0287 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.953068] env[68569]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5113ad6-724e-4f42-b0d4-108e328750f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.959057] env[68569]: DEBUG oslo_vmware.api [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1083.959057] env[68569]: value = "task-3167673" [ 1083.959057] env[68569]: _type = "Task" [ 1083.959057] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.966604] env[68569]: DEBUG oslo_vmware.api [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.997788] env[68569]: DEBUG nova.compute.manager [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Received event network-vif-deleted-e9d20c97-bf70-4f30-8424-5db6b70e5677 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1083.998203] env[68569]: DEBUG nova.compute.manager [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received event network-changed-d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1083.998416] env[68569]: DEBUG nova.compute.manager [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Refreshing instance network info cache due to event network-changed-d3de7a89-6fef-43c5-8fe1-1f6e07a715ee. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1083.998645] env[68569]: DEBUG oslo_concurrency.lockutils [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] Acquiring lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.998810] env[68569]: DEBUG oslo_concurrency.lockutils [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] Acquired lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.998985] env[68569]: DEBUG nova.network.neutron [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Refreshing network info cache for port d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.116202] env[68569]: DEBUG nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1084.119996] env[68569]: DEBUG oslo_concurrency.lockutils [None req-209b6726-2c53-46c7-b676-69f44feac419 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.120892] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.751s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.171620] env[68569]: DEBUG oslo_vmware.api [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167670, 'name': ReconfigVM_Task, 'duration_secs': 0.211831} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.179019] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1084.183331] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fbf8b60-59f8-40f4-95c8-17b56e884037 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.201948] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167671, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.205929] env[68569]: DEBUG oslo_vmware.api [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1084.205929] env[68569]: value = "task-3167674" [ 1084.205929] env[68569]: _type = "Task" [ 1084.205929] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.218083] env[68569]: DEBUG oslo_vmware.api [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167674, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.265814] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167668, 'name': CreateVM_Task, 'duration_secs': 0.654691} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.266282] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.267789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.267789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.267789] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1084.267789] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6230e92-3bcc-45bb-afbc-cf27464581d3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.272594] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1084.272594] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521bcfe4-ee54-a9db-2f55-5a370e36f831" [ 1084.272594] env[68569]: _type = "Task" [ 1084.272594] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.284098] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521bcfe4-ee54-a9db-2f55-5a370e36f831, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.394282] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda1b677-3fec-4ab9-a22c-347819cdcdd8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.401496] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2511cd75-09e3-462d-820d-5e90b6025a00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.431608] env[68569]: DEBUG nova.network.neutron [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Successfully created port: b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.434395] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab7f267-2446-451a-a0b9-65a7b02f251e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.441961] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd373e11-0cf3-4f10-a217-bface91f9fc9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.454921] env[68569]: DEBUG nova.compute.provider_tree [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.468206] env[68569]: DEBUG oslo_vmware.api [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133854} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.468206] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.468568] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1084.468720] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1084.468915] env[68569]: INFO nova.compute.manager [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1084.469171] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1084.470083] env[68569]: DEBUG nova.compute.manager [-] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1084.470190] env[68569]: DEBUG nova.network.neutron [-] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1084.588484] env[68569]: DEBUG nova.network.neutron [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1084.621867] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Successfully updated port: 970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.682812] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167671, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.713563] env[68569]: DEBUG oslo_vmware.api [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167674, 'name': ReconfigVM_Task, 'duration_secs': 0.146394} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.713883] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633684', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'name': 'volume-90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948', 'attached_at': '', 'detached_at': '', 'volume_id': '90a6a766-5e36-48f2-84f2-a3b3be1783a7', 'serial': '90a6a766-5e36-48f2-84f2-a3b3be1783a7'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1084.789009] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]521bcfe4-ee54-a9db-2f55-5a370e36f831, 'name': SearchDatastore_Task, 'duration_secs': 0.009914} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.789356] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.789644] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.789865] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.790020] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.790202] 
env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.790485] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70674b07-3fe9-432a-9982-beee1e22be9d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.799917] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.800110] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.800820] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45480550-5a96-4c87-b2aa-dc7f8460f56b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.805773] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1084.805773] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f44794-085f-f9d2-44c6-25246452fb98" [ 1084.805773] env[68569]: _type = "Task" [ 1084.805773] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.806583] env[68569]: DEBUG nova.network.neutron [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.817268] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f44794-085f-f9d2-44c6-25246452fb98, 'name': SearchDatastore_Task, 'duration_secs': 0.008981} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.818172] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b553870-4f95-4019-8774-3f424c3ea81f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.823269] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1084.823269] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f26fb0-4b6e-d6de-017c-3ac425efe92f" [ 1084.823269] env[68569]: _type = "Task" [ 1084.823269] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.833041] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f26fb0-4b6e-d6de-017c-3ac425efe92f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.908965] env[68569]: DEBUG nova.compute.manager [req-33c3d84e-5581-46a4-9718-ed9982444908 req-b4174591-fc0b-4afe-9048-ee08121193c0 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Received event network-vif-deleted-fb7d829c-cd12-4906-a87e-6d26d31f5771 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1084.909131] env[68569]: INFO nova.compute.manager [req-33c3d84e-5581-46a4-9718-ed9982444908 req-b4174591-fc0b-4afe-9048-ee08121193c0 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Neutron deleted interface fb7d829c-cd12-4906-a87e-6d26d31f5771; detaching it from the instance and deleting it from the info cache [ 1084.909359] env[68569]: DEBUG nova.network.neutron [req-33c3d84e-5581-46a4-9718-ed9982444908 req-b4174591-fc0b-4afe-9048-ee08121193c0 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.957625] env[68569]: DEBUG nova.scheduler.client.report [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.124791] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.130223] env[68569]: DEBUG 
nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1085.155369] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1085.155616] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.155771] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.155948] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.156108] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1085.156256] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1085.156462] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1085.156638] env[68569]: DEBUG nova.virt.hardware [None 
req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1085.156881] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1085.157061] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1085.157492] env[68569]: DEBUG nova.virt.hardware [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1085.158113] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15aea99-b09e-438c-b544-6e230735771c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.166048] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221158ef-3865-4ae3-a57c-10cbe9046442 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.189087] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167671, 'name': ReconfigVM_Task, 'duration_secs': 1.039631} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.190148] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 5eb7ff96-3a9e-470d-9cbe-644446302ecf/5eb7ff96-3a9e-470d-9cbe-644446302ecf.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.190777] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4884bde-dd7b-4dd4-b29c-cd6a010bf771 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.197588] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1085.197588] env[68569]: value = "task-3167675" [ 1085.197588] env[68569]: _type = "Task" [ 1085.197588] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.205524] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167675, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.261857] env[68569]: DEBUG nova.objects.instance [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.308994] env[68569]: DEBUG oslo_concurrency.lockutils [req-51e57769-675c-4380-b949-cc31493bd596 req-03c865b0-844a-4022-9187-37634486b22c service nova] Releasing lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.309749] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.310324] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.335826] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f26fb0-4b6e-d6de-017c-3ac425efe92f, 'name': SearchDatastore_Task, 'duration_secs': 0.008753} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.336255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.336536] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.336793] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b19d943-4bf5-4d5e-ac8a-c9dcd70a8e08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.344991] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1085.344991] env[68569]: value = "task-3167676" [ 1085.344991] env[68569]: _type = "Task" [ 1085.344991] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.352701] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.388616] env[68569]: DEBUG nova.network.neutron [-] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.412291] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2967d0bc-b412-48cf-a947-a91036cf35df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.421457] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef0288c-bfd4-49ca-bfe7-4376c074e4a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.454538] env[68569]: DEBUG nova.compute.manager [req-33c3d84e-5581-46a4-9718-ed9982444908 req-b4174591-fc0b-4afe-9048-ee08121193c0 service nova] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Detach interface failed, port_id=fb7d829c-cd12-4906-a87e-6d26d31f5771, reason: Instance 7c71799e-21d9-41f8-b35c-7117354a0287 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1085.462950] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.342s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.462950] env[68569]: INFO nova.compute.manager [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Successfully reverted task state from resize_migrating on failure for instance. [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server [None req-d2fbc46d-5b25-42c8-99ae-828604eb0571 tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 
1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1085.470803] env[68569]: ERROR oslo_messaging.rpc.server [ 1085.472526] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.264s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.710097] env[68569]: DEBUG 
oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167675, 'name': Rename_Task, 'duration_secs': 0.247379} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.711183] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.711490] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb82ffbb-be89-4186-a803-154f280e6fcb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.721627] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1085.721627] env[68569]: value = "task-3167677" [ 1085.721627] env[68569]: _type = "Task" [ 1085.721627] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.733190] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167677, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.859102] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167676, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.893556] env[68569]: INFO nova.compute.manager [-] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Took 1.42 seconds to deallocate network for instance. 
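Editor's note on the ERROR traceback a few records above: the RPC handler fails because resize_instance ends up calling the driver with one more positional argument than VMwareVCDriver.migrate_disk_and_power_off() accepts ("takes from 6 to 9 positional arguments but 10 were given"). The snippet below is a minimal, self-contained illustration of that failure mode only; the parameter list is hypothetical and is not taken from Nova's actual driver signature.

class IllustrativeDriver:
    def migrate_disk_and_power_off(self, context, instance, dest, flavor,
                                   network_info, block_device_info=None,
                                   timeout=0, retry_interval=0):
        # Counting ``self``, this accepts 6 to 9 positional arguments.
        return {}


driver = IllustrativeDriver()
try:
    # Nine explicit positional arguments plus the bound ``self`` makes ten,
    # one more than the method accepts, reproducing the same class of error.
    driver.migrate_disk_and_power_off(
        "ctxt", "instance", "dest-host", "flavor",
        "network_info", None, 0, 0, "unexpected-extra")
except TypeError as exc:
    print(exc)  # "... takes from 6 to 9 positional arguments but 10 were given"

As the INFO record above shows, the compute manager handles this by reverting the task state from resize_migrating before the exception is re-raised to the RPC layer.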
[ 1086.053158] env[68569]: DEBUG nova.compute.manager [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received event network-vif-plugged-970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1086.053158] env[68569]: DEBUG oslo_concurrency.lockutils [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] Acquiring lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.053158] env[68569]: DEBUG oslo_concurrency.lockutils [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.053158] env[68569]: DEBUG oslo_concurrency.lockutils [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.053158] env[68569]: DEBUG nova.compute.manager [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] No waiting events found dispatching network-vif-plugged-970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.054707] env[68569]: WARNING nova.compute.manager [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received unexpected event network-vif-plugged-970bdc81-4392-467c-8891-9dd4c15c0379 for instance with vm_state building and task_state spawning. [ 1086.055033] env[68569]: DEBUG nova.compute.manager [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received event network-changed-970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1086.055445] env[68569]: DEBUG nova.compute.manager [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Refreshing instance network info cache due to event network-changed-970bdc81-4392-467c-8891-9dd4c15c0379. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1086.055724] env[68569]: DEBUG oslo_concurrency.lockutils [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] Acquiring lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.056754] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1086.195933] env[68569]: DEBUG oslo_concurrency.lockutils [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.237790] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167677, 'name': PowerOnVM_Task} progress is 82%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.272107] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48e6afda-9c70-4bd0-93e1-93ef4593b953 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.334s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.272107] env[68569]: DEBUG oslo_concurrency.lockutils [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.075s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.272107] env[68569]: DEBUG nova.compute.manager [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1086.272107] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbea77e-9fa0-42d1-bcc8-01cfda2b3ef6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.279905] env[68569]: DEBUG nova.compute.manager [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1086.281358] 
env[68569]: DEBUG nova.objects.instance [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.337079] env[68569]: DEBUG nova.network.neutron [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Successfully updated port: b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.359633] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551213} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.359908] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.360141] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.360402] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e576c1e0-0e66-43e5-b182-80c073bcd24a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.369172] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1086.369172] env[68569]: value = "task-3167678" [ 1086.369172] env[68569]: _type = "Task" [ 1086.369172] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.381283] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167678, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.400473] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.487046] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Applying migration context for instance d7a0631c-902d-4653-b900-2123de5bcb44 as it has an incoming, in-progress migration fadc6c12-6187-4e3b-b55a-610a9dbc0520. Migration status is error {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1086.488854] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=68569) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1086.516233] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.516478] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 61aa0997-ffa6-4551-bdaa-132026e240f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.516590] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.516729] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance f5f8b054-7ee4-40f5-84de-1cee02949cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.516911] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.517095] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.517294] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a7145443-aacb-4d9e-8e39-3741d0630849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.517455] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 671f7e6d-703e-48a9-8509-2a8924afe911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.517622] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.517783] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 24bcffcc-6da1-4ae5-b802-88e9364eaf0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.517950] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 5eb7ff96-3a9e-470d-9cbe-644446302ecf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.518200] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7c71799e-21d9-41f8-b35c-7117354a0287 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.518433] env[68569]: WARNING nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance d7a0631c-902d-4653-b900-2123de5bcb44 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
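Editor's note: the recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" records in this section (for example the compute_resources lock a few records above) come from the oslo.concurrency lockutils helpers referenced in the log's file paths. Below is a rough sketch of the two usual usage patterns; the lock names and function bodies are placeholders, only the lockutils helpers themselves are taken from the log.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage_example():
    # Runs with the named lock held; lockutils emits the DEBUG records seen
    # above (wait time on acquisition, hold time on release).
    pass


def explicit_lock_example():
    # Equivalent context-manager form for ad-hoc critical sections.
    with lockutils.lock('refresh_cache-example-instance-uuid'):
        pass


if __name__ == '__main__':
    update_usage_example()
    explicit_lock_example()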
[ 1086.518541] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fd34691f-ebe5-4b40-994c-6278e09fc9eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.518663] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ee188712-b0e0-44ee-80b4-be72da32299f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1086.518910] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1086.519070] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1086.629482] env[68569]: DEBUG nova.network.neutron [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Updating instance_info_cache with network_info: [{"id": "d3de7a89-6fef-43c5-8fe1-1f6e07a715ee", "address": "fa:16:3e:af:3a:6e", "network": {"id": "f95ae0d4-bf99-4547-96cf-a774a40142a6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-171051699", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3de7a89-6f", "ovs_interfaceid": "d3de7a89-6fef-43c5-8fe1-1f6e07a715ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "970bdc81-4392-467c-8891-9dd4c15c0379", "address": "fa:16:3e:3e:ff:a2", "network": {"id": "5583930a-1c67-4178-a42e-cc86a9d11eee", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-398959644", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.67", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": 
"83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap970bdc81-43", "ovs_interfaceid": "970bdc81-4392-467c-8891-9dd4c15c0379", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.735599] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167677, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.782463] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61fa12f-0165-4983-af2d-d35e17f3ed72 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.796403] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fb1c61-e278-4d66-bba8-73beaba6de24 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.842237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.842237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.842237] env[68569]: DEBUG nova.network.neutron [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1086.845336] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73be050d-0aa2-40e5-b3e0-aea545580d4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.855246] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e8f0e4-bec9-4124-bbc7-7e178d97f4ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.872817] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
[ 1086.882876] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167678, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076718} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.883632] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.884738] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78f8c5c-41bd-456a-90d2-819b9c391f7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.909857] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.910474] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b0d8458-b9d6-418b-a886-ab2bae5d9382 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.931333] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1086.931333] env[68569]: value = "task-3167679" [ 1086.931333] env[68569]: _type = "Task" [ 1086.931333] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.940477] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167679, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.132102] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Releasing lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.132102] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Instance network_info: |[{"id": "d3de7a89-6fef-43c5-8fe1-1f6e07a715ee", "address": "fa:16:3e:af:3a:6e", "network": {"id": "f95ae0d4-bf99-4547-96cf-a774a40142a6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-171051699", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3de7a89-6f", "ovs_interfaceid": "d3de7a89-6fef-43c5-8fe1-1f6e07a715ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "970bdc81-4392-467c-8891-9dd4c15c0379", "address": "fa:16:3e:3e:ff:a2", "network": {"id": "5583930a-1c67-4178-a42e-cc86a9d11eee", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-398959644", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.67", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap970bdc81-43", "ovs_interfaceid": "970bdc81-4392-467c-8891-9dd4c15c0379", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1087.132336] env[68569]: DEBUG oslo_concurrency.lockutils [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] Acquired lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.132395] env[68569]: DEBUG nova.network.neutron 
[req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Refreshing network info cache for port 970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1087.133643] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:3a:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3de7a89-6fef-43c5-8fe1-1f6e07a715ee', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:ff:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec1528b-3e87-477b-8ab2-02696ad47e66', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '970bdc81-4392-467c-8891-9dd4c15c0379', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.143639] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.144423] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.144423] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23f39256-bdc1-46c7-9bb6-871f8e054a31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.166804] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.166804] env[68569]: value = "task-3167680" [ 1087.166804] env[68569]: _type = "Task" [ 1087.166804] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.175019] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167680, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.232952] env[68569]: DEBUG oslo_vmware.api [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167677, 'name': PowerOnVM_Task, 'duration_secs': 1.099249} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.232952] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.232952] env[68569]: INFO nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Took 16.21 seconds to spawn the instance on the hypervisor. [ 1087.233482] env[68569]: DEBUG nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.233892] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da218ba-d011-4a8e-b831-c6320250d321 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.306498] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1087.307191] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a72c674f-2426-49f4-8e64-9caf9a2a1c95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.315498] env[68569]: DEBUG oslo_vmware.api [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1087.315498] env[68569]: value = "task-3167681" [ 1087.315498] env[68569]: _type = "Task" [ 1087.315498] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.327169] env[68569]: DEBUG oslo_vmware.api [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167681, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.378923] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.383548] env[68569]: DEBUG nova.network.neutron [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1087.445255] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167679, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.537077] env[68569]: DEBUG nova.network.neutron [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.676712] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167680, 'name': CreateVM_Task, 'duration_secs': 0.457738} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.676886] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1087.677651] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.677816] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.678260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1087.678511] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b05893c-9414-429f-9f44-0a7914b029c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.689442] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1087.689442] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52324d5e-c417-d6f1-8b64-fc7ca7a65c2d" [ 1087.689442] env[68569]: _type = "Task" [ 1087.689442] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.694479] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52324d5e-c417-d6f1-8b64-fc7ca7a65c2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.756387] env[68569]: INFO nova.compute.manager [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Took 27.17 seconds to build instance. [ 1087.830992] env[68569]: DEBUG oslo_vmware.api [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167681, 'name': PowerOffVM_Task, 'duration_secs': 0.2039} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.831388] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.831758] env[68569]: DEBUG nova.compute.manager [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.833559] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468b653d-1942-4cb4-bf5a-6818fcc31a0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.887265] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1087.887479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.416s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.890158] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.304s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.890349] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.892188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.492s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.892330] env[68569]: DEBUG nova.objects.instance [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid 7c71799e-21d9-41f8-b35c-7117354a0287 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.912629] env[68569]: INFO nova.scheduler.client.report [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Deleted 
allocations for instance d7a0631c-902d-4653-b900-2123de5bcb44 [ 1087.938036] env[68569]: DEBUG nova.network.neutron [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Updated VIF entry in instance network info cache for port 970bdc81-4392-467c-8891-9dd4c15c0379. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.938486] env[68569]: DEBUG nova.network.neutron [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Updating instance_info_cache with network_info: [{"id": "d3de7a89-6fef-43c5-8fe1-1f6e07a715ee", "address": "fa:16:3e:af:3a:6e", "network": {"id": "f95ae0d4-bf99-4547-96cf-a774a40142a6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-171051699", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3de7a89-6f", "ovs_interfaceid": "d3de7a89-6fef-43c5-8fe1-1f6e07a715ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "970bdc81-4392-467c-8891-9dd4c15c0379", "address": "fa:16:3e:3e:ff:a2", "network": {"id": "5583930a-1c67-4178-a42e-cc86a9d11eee", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-398959644", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.67", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "83427ebdc6364f7887409f7bfc35f025", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap970bdc81-43", "ovs_interfaceid": "970bdc81-4392-467c-8891-9dd4c15c0379", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.945678] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167679, 'name': ReconfigVM_Task, 'duration_secs': 0.545564} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.946578] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfigured VM instance instance-0000005a to attach disk [datastore1] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3/a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.947919] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'encryption_options': None, 'device_type': 'disk', 'disk_bus': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'encryption_format': None, 'guest_format': None, 'device_name': '/dev/sda', 'encrypted': False, 'image_id': 'cfcf6154-fe87-45d3-9aaf-2d3604c95629'}], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'}, 'delete_on_termination': False, 'disk_bus': None, 'boot_index': None, 'mount_device': '/dev/sdb', 'guest_format': None, 'attachment_id': '4da6c4b6-5d3d-4a70-aa10-f8f09f388330', 'volume_type': None}], 'swap': None} {{(pid=68569) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1087.948346] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1087.948585] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1087.950606] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fd022b-b58d-4fb5-bab6-ec3aa1376cb5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.969257] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccae4685-2ffb-457d-9171-fb40c99f80f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.995092] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] volume-c8148d10-8bad-49bc-93bf-559f83378a11/volume-c8148d10-8bad-49bc-93bf-559f83378a11.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.995691] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fe1c554-384d-435d-a801-bfd28af16e16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.014590] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1088.014590] env[68569]: value = "task-3167682" [ 1088.014590] env[68569]: _type = "Task" [ 1088.014590] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.022912] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167682, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.040180] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.040529] env[68569]: DEBUG nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Instance network_info: |[{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1088.041617] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:47:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8cb9bd3-0cdd-4183-b5f7-e26fe241764b', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.049970] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.050359] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.051090] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2852c9a-ee5a-43e7-b86b-5622b953f8ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.075880] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.075880] env[68569]: value = "task-3167683" [ 1088.075880] env[68569]: _type = "Task" [ 1088.075880] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.087812] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167683, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.123858] env[68569]: DEBUG nova.compute.manager [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-vif-plugged-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1088.124187] env[68569]: DEBUG oslo_concurrency.lockutils [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.124445] env[68569]: DEBUG oslo_concurrency.lockutils [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.126247] env[68569]: DEBUG oslo_concurrency.lockutils [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.126247] env[68569]: DEBUG nova.compute.manager [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] No waiting events found dispatching network-vif-plugged-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1088.126247] env[68569]: WARNING nova.compute.manager [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received unexpected event network-vif-plugged-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b for instance with vm_state building and task_state spawning. 
[ 1088.126247] env[68569]: DEBUG nova.compute.manager [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-changed-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1088.126247] env[68569]: DEBUG nova.compute.manager [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing instance network info cache due to event network-changed-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1088.126247] env[68569]: DEBUG oslo_concurrency.lockutils [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.126247] env[68569]: DEBUG oslo_concurrency.lockutils [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.126525] env[68569]: DEBUG nova.network.neutron [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing network info cache for port b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.197474] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52324d5e-c417-d6f1-8b64-fc7ca7a65c2d, 'name': SearchDatastore_Task, 'duration_secs': 0.015236} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.197999] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.198115] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.198267] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.198413] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.198589] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.198904] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b88b4bd-4a8f-4b4c-9dae-bf790403a07e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.208130] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.208330] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.209110] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-343add70-00be-4077-aa0b-e4b91c3ed8b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.218018] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1088.218018] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c4b684-dbac-eb78-0527-9ec90ebd8a0e" [ 1088.218018] env[68569]: _type = "Task" [ 1088.218018] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.229049] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c4b684-dbac-eb78-0527-9ec90ebd8a0e, 'name': SearchDatastore_Task} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.229813] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7689cf4-ed9d-467d-8498-8a467585b20f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.235765] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1088.235765] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af8bb1-3ae8-fcc0-d2de-97247a05559e" [ 1088.235765] env[68569]: _type = "Task" [ 1088.235765] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.246246] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af8bb1-3ae8-fcc0-d2de-97247a05559e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.263351] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5962293d-51f6-446f-90b8-584368655a3a tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.690s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.348548] env[68569]: DEBUG oslo_concurrency.lockutils [None req-43748b3f-17a1-4abe-8552-cf84eee16015 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.078s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.423786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ba42bb25-3a84-43f3-98fc-6bc9833e431a tempest-DeleteServersTestJSON-1181782645 tempest-DeleteServersTestJSON-1181782645-project-member] Lock "d7a0631c-902d-4653-b900-2123de5bcb44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.618s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.441208] env[68569]: DEBUG oslo_concurrency.lockutils [req-2aacb75a-39c8-4bbd-83d6-2c60298e486f req-706ca1a2-e3be-4bed-a10e-1730c5a07571 service nova] Releasing lock "refresh_cache-fd34691f-ebe5-4b40-994c-6278e09fc9eb" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.527487] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167682, 'name': ReconfigVM_Task, 'duration_secs': 0.443603} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.527645] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfigured VM instance instance-0000005a to attach disk [datastore1] volume-c8148d10-8bad-49bc-93bf-559f83378a11/volume-c8148d10-8bad-49bc-93bf-559f83378a11.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.534787] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-886a7e45-397f-4380-aec6-7b3c2b4f5ee9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.550445] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1088.550445] env[68569]: value = "task-3167684" [ 1088.550445] env[68569]: _type = "Task" [ 1088.550445] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.560055] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167684, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.588526] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167683, 'name': CreateVM_Task, 'duration_secs': 0.370318} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.591232] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.592119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.592282] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.592601] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.592864] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17c9b1e3-9d37-4adb-aecf-d026b5feeaa6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.597523] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1088.597523] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52abf7e6-49dd-02d7-6c66-f68b9a71ac8b" [ 1088.597523] env[68569]: _type = "Task" [ 1088.597523] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.608175] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52abf7e6-49dd-02d7-6c66-f68b9a71ac8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.634728] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be32e33-d224-48c4-a0a2-fa86253d6e15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.641390] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206d9c71-593b-437f-903e-100e38eecc6a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.679168] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823bc69b-b847-4035-941d-c3f546359820 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.687012] env[68569]: DEBUG nova.objects.instance [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.689229] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdc2a17-c1a2-4fc1-ac7a-a69d32a9f738 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.705640] env[68569]: DEBUG nova.compute.provider_tree [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.748115] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af8bb1-3ae8-fcc0-d2de-97247a05559e, 'name': SearchDatastore_Task, 'duration_secs': 0.010156} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.748115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.748345] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fd34691f-ebe5-4b40-994c-6278e09fc9eb/fd34691f-ebe5-4b40-994c-6278e09fc9eb.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1088.748598] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6c6377a-aa0c-47a6-9b0b-10c84f32e552 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.755453] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1088.755453] env[68569]: value = "task-3167685" [ 1088.755453] env[68569]: _type = "Task" [ 1088.755453] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.763378] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167685, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.773862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.774131] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.865131] env[68569]: DEBUG nova.network.neutron [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updated VIF entry in instance network info cache for port b8cb9bd3-0cdd-4183-b5f7-e26fe241764b. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1088.865480] env[68569]: DEBUG nova.network.neutron [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.915797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.916058] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.916269] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.916451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.916635] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 
tempest-ImagesTestJSON-885804793-project-member] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.918664] env[68569]: INFO nova.compute.manager [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Terminating instance [ 1089.063521] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167684, 'name': ReconfigVM_Task, 'duration_secs': 0.239098} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.064040] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1089.064734] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac1be812-0369-4e70-9475-c236905e93e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.073390] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1089.073390] env[68569]: value = "task-3167686" [ 1089.073390] env[68569]: _type = "Task" [ 1089.073390] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.086897] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167686, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.111082] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52abf7e6-49dd-02d7-6c66-f68b9a71ac8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009478} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.111406] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.111658] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.111899] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.112053] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.112234] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.112516] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb5487ae-3d49-475b-ab1f-c97576420daf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.122913] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.123135] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.123874] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2e3c17e-7322-4f88-88e4-4c2b668a18f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.131468] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1089.131468] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5237cb82-783f-fb18-04ae-f2429d0e4d1b" [ 1089.131468] env[68569]: _type = "Task" [ 1089.131468] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.139992] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5237cb82-783f-fb18-04ae-f2429d0e4d1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.197017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.197329] env[68569]: DEBUG oslo_concurrency.lockutils [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.198117] env[68569]: DEBUG nova.network.neutron [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.198117] env[68569]: DEBUG nova.objects.instance [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'info_cache' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.206616] env[68569]: DEBUG nova.scheduler.client.report [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.265591] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167685, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491972} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.267019] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fd34691f-ebe5-4b40-994c-6278e09fc9eb/fd34691f-ebe5-4b40-994c-6278e09fc9eb.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1089.267019] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1089.267019] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c47c27b5-3951-4aa4-a4c1-fb696fc95e68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.272949] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1089.272949] env[68569]: value = "task-3167687" [ 1089.272949] env[68569]: _type = "Task" [ 1089.272949] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.276161] env[68569]: DEBUG nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1089.286024] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167687, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.369554] env[68569]: DEBUG oslo_concurrency.lockutils [req-1ca3744c-4e5f-45da-9eec-63f934b83a37 req-534fcae7-a9a4-4b71-966e-9d70bccf29cd service nova] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.422726] env[68569]: DEBUG nova.compute.manager [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.422968] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.423880] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f00d54-1b2c-43fa-b2e8-2f50428cbe17 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.431573] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.431894] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ea31810-8c5b-417c-a7b2-40616e2e2dd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.437526] env[68569]: DEBUG oslo_vmware.api [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1089.437526] env[68569]: value = "task-3167688" [ 1089.437526] env[68569]: _type = "Task" [ 1089.437526] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.447614] env[68569]: DEBUG oslo_vmware.api [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.584030] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167686, 'name': Rename_Task, 'duration_secs': 0.21042} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.584210] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1089.585256] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb020544-9103-4763-bb80-7777b4dda2a5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.590882] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1089.590882] env[68569]: value = "task-3167689" [ 1089.590882] env[68569]: _type = "Task" [ 1089.590882] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.598652] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.642831] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5237cb82-783f-fb18-04ae-f2429d0e4d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.015817} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.643740] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc7ddb5-2e48-43fc-b280-2ba612b7f369 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.649066] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1089.649066] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d08c1f-4ba3-ff50-4819-bd725f432f5d" [ 1089.649066] env[68569]: _type = "Task" [ 1089.649066] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.658128] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d08c1f-4ba3-ff50-4819-bd725f432f5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.701772] env[68569]: DEBUG nova.objects.base [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Object Instance<92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948> lazy-loaded attributes: flavor,info_cache {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1089.711542] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.733722] env[68569]: INFO nova.scheduler.client.report [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance 7c71799e-21d9-41f8-b35c-7117354a0287 [ 1089.787679] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066346} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.788188] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1089.789086] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6a32c7-c5d0-4962-8cad-eb4bd9e97104 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.814186] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] fd34691f-ebe5-4b40-994c-6278e09fc9eb/fd34691f-ebe5-4b40-994c-6278e09fc9eb.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.815316] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.815581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.817156] env[68569]: INFO nova.compute.claims [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.819515] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0d3b080-9adf-4b23-bae6-b00df3739267 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.838747] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1089.838747] env[68569]: value = "task-3167690" [ 1089.838747] env[68569]: _type = "Task" [ 1089.838747] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.846923] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167690, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.947229] env[68569]: DEBUG oslo_vmware.api [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167688, 'name': PowerOffVM_Task, 'duration_secs': 0.183242} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.947493] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.947660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.947917] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c97abd73-b6c8-4f6d-adc4-bf7d8c8e2097 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.013239] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.013548] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.013796] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleting the datastore file [datastore1] 5eb7ff96-3a9e-470d-9cbe-644446302ecf {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.014086] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-369b965f-5605-4a4e-8d04-db34548506a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.020712] env[68569]: DEBUG oslo_vmware.api [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1090.020712] env[68569]: value = "task-3167692" [ 1090.020712] env[68569]: _type = "Task" [ 1090.020712] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.028218] env[68569]: DEBUG oslo_vmware.api [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.100530] env[68569]: DEBUG oslo_vmware.api [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167689, 'name': PowerOnVM_Task, 'duration_secs': 0.507782} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.100768] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1090.100945] env[68569]: DEBUG nova.compute.manager [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1090.101752] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40389f6d-f841-4b32-b67f-32e6bb9d6057 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.158977] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d08c1f-4ba3-ff50-4819-bd725f432f5d, 'name': SearchDatastore_Task, 'duration_secs': 0.016139} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.159291] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.159610] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ee188712-b0e0-44ee-80b4-be72da32299f/ee188712-b0e0-44ee-80b4-be72da32299f.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.159871] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05fbee0a-d9ff-4430-9bc0-b0317299dc00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.166104] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1090.166104] env[68569]: value = "task-3167693" [ 1090.166104] env[68569]: _type = "Task" [ 1090.166104] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.173778] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.242330] env[68569]: DEBUG oslo_concurrency.lockutils [None req-66844756-9c88-47ab-beb1-c66261de19a0 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "7c71799e-21d9-41f8-b35c-7117354a0287" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.388s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.357796] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167690, 'name': ReconfigVM_Task, 'duration_secs': 0.425822} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.357796] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Reconfigured VM instance instance-00000066 to attach disk [datastore2] fd34691f-ebe5-4b40-994c-6278e09fc9eb/fd34691f-ebe5-4b40-994c-6278e09fc9eb.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.357796] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb06cd05-dbd3-41af-919a-7f7973440a61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.368286] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1090.368286] env[68569]: value = "task-3167694" [ 1090.368286] env[68569]: _type = "Task" [ 1090.368286] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.377295] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167694, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.482987] env[68569]: DEBUG nova.network.neutron [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [{"id": "feea2419-1c39-4028-811f-c72311dae7a4", "address": "fa:16:3e:16:ea:60", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeea2419-1c", "ovs_interfaceid": "feea2419-1c39-4028-811f-c72311dae7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.532545] env[68569]: DEBUG oslo_vmware.api [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167692, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265736} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.532883] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.533129] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.533326] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.533516] env[68569]: INFO nova.compute.manager [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1090.533816] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.534412] env[68569]: DEBUG nova.compute.manager [-] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.534780] env[68569]: DEBUG nova.network.neutron [-] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.620144] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.678929] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167693, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.877371] env[68569]: DEBUG nova.compute.manager [req-993cf05e-dd8d-408a-bf3f-52201c3fecf0 req-ac289e82-6db5-4d5f-919c-607e24cd808f service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Received event network-vif-deleted-bb074230-39bf-4287-9375-e7e6b237a383 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1090.877615] env[68569]: INFO nova.compute.manager [req-993cf05e-dd8d-408a-bf3f-52201c3fecf0 req-ac289e82-6db5-4d5f-919c-607e24cd808f service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Neutron deleted interface bb074230-39bf-4287-9375-e7e6b237a383; detaching it from the instance and deleting it from the info cache [ 1090.877724] env[68569]: DEBUG nova.network.neutron [req-993cf05e-dd8d-408a-bf3f-52201c3fecf0 req-ac289e82-6db5-4d5f-919c-607e24cd808f service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.884409] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.884637] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.884835] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.885029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.885313] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.890468] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 
tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167694, 'name': Rename_Task, 'duration_secs': 0.219855} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.890888] env[68569]: INFO nova.compute.manager [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Terminating instance [ 1090.892159] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1090.895559] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c23e30ba-d15e-47fe-b01c-404050eea735 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.902608] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1090.902608] env[68569]: value = "task-3167695" [ 1090.902608] env[68569]: _type = "Task" [ 1090.902608] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.912239] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167695, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.986646] env[68569]: DEBUG oslo_concurrency.lockutils [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "refresh_cache-92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.023356] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe12f5d-120b-43ad-ba1d-08e999f996ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.030622] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e040031c-8d53-4a07-8bf8-8ff3640ee1c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.063034] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e79cb7c-dd7a-407f-8f3a-ef1803f649b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.070848] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff1be6b-8db6-43b6-96e6-b42fc4c47fd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.084127] env[68569]: DEBUG nova.compute.provider_tree [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.176559] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167693, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527379} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.176853] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] ee188712-b0e0-44ee-80b4-be72da32299f/ee188712-b0e0-44ee-80b4-be72da32299f.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.177077] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.177321] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc926960-752e-40da-9aa6-248208b3891b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.183885] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1091.183885] env[68569]: value = "task-3167696" [ 1091.183885] env[68569]: _type = "Task" [ 1091.183885] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.190931] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167696, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.314053] env[68569]: DEBUG nova.network.neutron [-] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.384024] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39d8336d-ac9b-4e0b-ae0c-aace06d91830 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.394068] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e2c20e-9821-4903-a2b7-7019fbf9f84f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.404729] env[68569]: DEBUG nova.compute.manager [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.404937] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.406086] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a36af8-424f-46a6-9e22-1b2490718abe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.419022] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167695, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.419022] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.419644] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06b16199-3ac8-4cd6-b0a3-f7d367a0940f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.434335] env[68569]: DEBUG nova.compute.manager [req-993cf05e-dd8d-408a-bf3f-52201c3fecf0 req-ac289e82-6db5-4d5f-919c-607e24cd808f service nova] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Detach interface failed, port_id=bb074230-39bf-4287-9375-e7e6b237a383, reason: Instance 5eb7ff96-3a9e-470d-9cbe-644446302ecf could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1091.436310] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1091.436310] env[68569]: value = "task-3167697" [ 1091.436310] env[68569]: _type = "Task" [ 1091.436310] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.444508] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167697, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.587190] env[68569]: DEBUG nova.scheduler.client.report [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.693508] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069658} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.694862] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.694862] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f10579c-a343-4e63-8d96-bc1daea9a828 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.715564] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] ee188712-b0e0-44ee-80b4-be72da32299f/ee188712-b0e0-44ee-80b4-be72da32299f.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.715789] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00496289-82f4-408c-9069-97472e3032cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.735862] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1091.735862] env[68569]: value = "task-3167698" [ 1091.735862] env[68569]: _type = "Task" [ 1091.735862] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.743607] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167698, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.816318] env[68569]: INFO nova.compute.manager [-] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Took 1.28 seconds to deallocate network for instance. [ 1091.915637] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167695, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.945882] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.996704] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.997097] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1855ec40-b94a-4def-b556-ce57517cdf59 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.003468] env[68569]: DEBUG oslo_vmware.api [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1092.003468] env[68569]: value = "task-3167699" [ 1092.003468] env[68569]: _type = "Task" [ 1092.003468] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.010847] env[68569]: DEBUG oslo_vmware.api [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167699, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.092503] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.093344] env[68569]: DEBUG nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1092.097821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.478s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.098163] env[68569]: DEBUG nova.objects.instance [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1092.246420] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.324610] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.418231] env[68569]: DEBUG oslo_vmware.api [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167695, 'name': PowerOnVM_Task, 'duration_secs': 1.252295} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.418572] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.418833] env[68569]: INFO nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Took 11.48 seconds to spawn the instance on the hypervisor. 
[ 1092.419105] env[68569]: DEBUG nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.420182] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc86972-d66b-4fd5-9dda-c344106b0d1d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.448266] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167697, 'name': PowerOffVM_Task, 'duration_secs': 0.694228} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.448590] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.448809] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.449136] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-530d3688-4c43-4ac1-a163-188a6dce2995 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.197537] env[68569]: DEBUG nova.compute.utils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1093.209437] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1093.209732] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1093.209975] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore1] 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.211104] env[68569]: INFO nova.compute.manager [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: 
fd34691f-ebe5-4b40-994c-6278e09fc9eb] Took 28.37 seconds to build instance. [ 1093.215973] env[68569]: DEBUG nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1093.216189] env[68569]: DEBUG nova.network.neutron [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1093.222023] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a82e508-0559-403f-8947-031afd71ff97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.238663] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167698, 'name': ReconfigVM_Task, 'duration_secs': 0.93875} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.238983] env[68569]: DEBUG oslo_vmware.api [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167699, 'name': PowerOnVM_Task, 'duration_secs': 0.415074} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.240747] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfigured VM instance instance-00000067 to attach disk [datastore1] ee188712-b0e0-44ee-80b4-be72da32299f/ee188712-b0e0-44ee-80b4-be72da32299f.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.242049] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.242422] env[68569]: DEBUG nova.compute.manager [None req-246ab221-4d09-4d77-8ef7-603938805a3f tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.242914] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1093.242914] env[68569]: value = "task-3167701" [ 1093.242914] env[68569]: _type = "Task" [ 1093.242914] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.243318] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72a53c79-aeaa-45a0-bccb-db249598af92 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.246337] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f658958-75b6-44dc-b794-84a01b5212b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.267790] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167701, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.268595] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1093.268595] env[68569]: value = "task-3167702" [ 1093.268595] env[68569]: _type = "Task" [ 1093.268595] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.279414] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167702, 'name': Rename_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.281259] env[68569]: DEBUG nova.policy [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5038eb62813e43d0b35a7ff07cdd62da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f5444e64380448bac041e3c4fd57865', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1093.573573] env[68569]: DEBUG nova.network.neutron [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Successfully created port: 8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.700451] env[68569]: DEBUG nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1093.711201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.712668] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e1b28eab-5648-4ba7-834b-218c58736461 tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.873s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.713618] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2cfe6374-a547-4b99-b298-f712e4cff056 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.616s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.714614] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.714852] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.715118] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.715319] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.717314] env[68569]: INFO nova.compute.manager [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Terminating instance [ 1093.721678] 
env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.397s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.721935] env[68569]: DEBUG nova.objects.instance [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lazy-loading 'resources' on Instance uuid 5eb7ff96-3a9e-470d-9cbe-644446302ecf {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.759795] env[68569]: DEBUG oslo_vmware.api [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167701, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150162} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.760052] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.760239] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.760410] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.760581] env[68569]: INFO nova.compute.manager [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Took 2.36 seconds to destroy the instance on the hypervisor. [ 1093.760922] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1093.761149] env[68569]: DEBUG nova.compute.manager [-] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1093.761245] env[68569]: DEBUG nova.network.neutron [-] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1093.781472] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167702, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.125042] env[68569]: DEBUG nova.compute.manager [req-df090a0f-d5c0-4901-84ed-8866f5bcd315 req-dae2e1ff-ce10-4499-9caf-91fb48988ffd service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Received event network-vif-deleted-b3f706dd-a754-40c2-ba12-9cfef3e9e2c1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1094.125265] env[68569]: INFO nova.compute.manager [req-df090a0f-d5c0-4901-84ed-8866f5bcd315 req-dae2e1ff-ce10-4499-9caf-91fb48988ffd service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Neutron deleted interface b3f706dd-a754-40c2-ba12-9cfef3e9e2c1; detaching it from the instance and deleting it from the info cache [ 1094.125438] env[68569]: DEBUG nova.network.neutron [req-df090a0f-d5c0-4901-84ed-8866f5bcd315 req-dae2e1ff-ce10-4499-9caf-91fb48988ffd service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.224050] env[68569]: DEBUG nova.compute.manager [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1094.224345] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1094.225149] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6007192c-e45e-4e42-b67c-33b20ca8b33d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.234893] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1094.235139] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9993d5d-2aa6-47f8-a4e0-04397755b71a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.242357] env[68569]: DEBUG oslo_vmware.api [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1094.242357] env[68569]: value = "task-3167703" [ 1094.242357] env[68569]: _type = "Task" [ 1094.242357] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.249620] env[68569]: DEBUG oslo_vmware.api [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.282459] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167702, 'name': Rename_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.391715] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba93600-8b47-45e9-b7e0-6a9c391b5741 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.399062] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2080a02e-a3a0-4dd3-b847-9956c969c857 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.427494] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0111c2b5-eb09-44fc-8be5-86887d89be30 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.435135] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3554bdc-bc12-46ee-bbe9-247f76f3cbfc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.449553] env[68569]: DEBUG nova.compute.provider_tree [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.609584] env[68569]: DEBUG nova.network.neutron [-] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.628945] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de0328ee-d512-4e56-aea8-5bdccb89915f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.638371] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c128cec-f5ab-472f-a504-ee5f19aa656a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.671478] env[68569]: DEBUG nova.compute.manager [req-df090a0f-d5c0-4901-84ed-8866f5bcd315 req-dae2e1ff-ce10-4499-9caf-91fb48988ffd service nova] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Detach interface failed, port_id=b3f706dd-a754-40c2-ba12-9cfef3e9e2c1, reason: Instance 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1094.709718] env[68569]: DEBUG nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1094.735399] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1094.735609] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.735769] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1094.735962] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.736118] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1094.736283] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1094.736502] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1094.736846] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1094.736846] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1094.737211] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1094.737211] env[68569]: DEBUG nova.virt.hardware [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1094.738045] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501d6101-a2f4-4236-ac84-8a9d0b49f19e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.749133] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febcb799-8982-4eb4-b80d-2706b513a4a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.757592] env[68569]: DEBUG oslo_vmware.api [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167703, 'name': PowerOffVM_Task, 'duration_secs': 0.351247} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.765348] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.765555] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1094.766008] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb76b163-c0fc-46a2-bf48-5be9a69f36d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.781538] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167702, 'name': Rename_Task, 'duration_secs': 1.232801} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.781810] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.782049] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5739ecd-ca04-4a5b-b46c-34951a69a244 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.787333] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1094.787333] env[68569]: value = "task-3167705" [ 1094.787333] env[68569]: _type = "Task" [ 1094.787333] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.794603] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167705, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.871053] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1094.871345] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1094.871543] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Deleting the datastore file [datastore2] fd34691f-ebe5-4b40-994c-6278e09fc9eb {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.871811] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ea12464-92c8-45b0-9fe5-3f941d0ee3e6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.877980] env[68569]: DEBUG oslo_vmware.api [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for the task: (returnval){ [ 1094.877980] env[68569]: value = "task-3167706" [ 1094.877980] env[68569]: _type = "Task" [ 1094.877980] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.886501] env[68569]: DEBUG oslo_vmware.api [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167706, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.953191] env[68569]: DEBUG nova.scheduler.client.report [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.962438] env[68569]: DEBUG nova.compute.manager [req-63519a21-093b-47ad-a011-b23f40b6fc65 req-f02df309-d5c5-4de7-b0a3-f7e72c01e1cd service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1094.962751] env[68569]: DEBUG oslo_concurrency.lockutils [req-63519a21-093b-47ad-a011-b23f40b6fc65 req-f02df309-d5c5-4de7-b0a3-f7e72c01e1cd service nova] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.963037] env[68569]: DEBUG oslo_concurrency.lockutils [req-63519a21-093b-47ad-a011-b23f40b6fc65 req-f02df309-d5c5-4de7-b0a3-f7e72c01e1cd service nova] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.963227] env[68569]: DEBUG oslo_concurrency.lockutils [req-63519a21-093b-47ad-a011-b23f40b6fc65 req-f02df309-d5c5-4de7-b0a3-f7e72c01e1cd service nova] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.963439] env[68569]: DEBUG nova.compute.manager [req-63519a21-093b-47ad-a011-b23f40b6fc65 req-f02df309-d5c5-4de7-b0a3-f7e72c01e1cd service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] No waiting events found dispatching network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1094.963637] env[68569]: WARNING nova.compute.manager [req-63519a21-093b-47ad-a011-b23f40b6fc65 req-f02df309-d5c5-4de7-b0a3-f7e72c01e1cd service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received unexpected event network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208 for instance with vm_state building and task_state spawning. 
[ 1095.066111] env[68569]: DEBUG nova.network.neutron [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Successfully updated port: 8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1095.112486] env[68569]: INFO nova.compute.manager [-] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Took 1.35 seconds to deallocate network for instance. [ 1095.296837] env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167705, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.387836] env[68569]: DEBUG oslo_vmware.api [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Task: {'id': task-3167706, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185139} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.388752] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1095.388922] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1095.389129] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1095.389305] env[68569]: INFO nova.compute.manager [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1095.389635] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1095.389850] env[68569]: DEBUG nova.compute.manager [-] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1095.389942] env[68569]: DEBUG nova.network.neutron [-] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1095.459518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.483669] env[68569]: INFO nova.scheduler.client.report [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted allocations for instance 5eb7ff96-3a9e-470d-9cbe-644446302ecf [ 1095.569317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.569463] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.569723] env[68569]: DEBUG nova.network.neutron [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1095.619949] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.620319] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.620549] env[68569]: DEBUG nova.objects.instance [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.797557] 
env[68569]: DEBUG oslo_vmware.api [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167705, 'name': PowerOnVM_Task, 'duration_secs': 0.874539} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.797821] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1095.798034] env[68569]: INFO nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Took 10.67 seconds to spawn the instance on the hypervisor. [ 1095.798216] env[68569]: DEBUG nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.799267] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71777006-8373-4e67-bb44-326f5dd93193 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.991838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-257a6f17-043b-4f48-8adc-78d8b5963593 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "5eb7ff96-3a9e-470d-9cbe-644446302ecf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.076s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.102701] env[68569]: DEBUG nova.network.neutron [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1096.225073] env[68569]: DEBUG nova.network.neutron [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.259248] env[68569]: DEBUG nova.network.neutron [-] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.287546] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931be742-6077-4c93-9b30-d94ffdb68dcc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.295724] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170bcde0-e2a5-4d84-a981-c594aeca179e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.332720] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cc7d1d-24d9-4f38-a4a5-ba0d86645233 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.339220] env[68569]: INFO nova.compute.manager [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Took 25.84 seconds to build instance. 
[ 1096.343803] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b23da32-9388-49e9-8cb5-d8c15b2005b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.358239] env[68569]: DEBUG nova.compute.provider_tree [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.720823] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "671f7e6d-703e-48a9-8509-2a8924afe911" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.721124] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "671f7e6d-703e-48a9-8509-2a8924afe911" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.721338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "671f7e6d-703e-48a9-8509-2a8924afe911-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.721522] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "671f7e6d-703e-48a9-8509-2a8924afe911-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.721686] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "671f7e6d-703e-48a9-8509-2a8924afe911-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.723699] env[68569]: INFO nova.compute.manager [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Terminating instance [ 1096.729528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.729809] env[68569]: DEBUG 
nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance network_info: |[{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1096.730204] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:80:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b05f57b-2ff2-49af-8333-0047f5230208', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1096.739792] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating folder: Project (6f5444e64380448bac041e3c4fd57865). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1096.740532] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9219bf4-41d4-4b79-a1f1-a708822a7aef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.751174] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created folder: Project (6f5444e64380448bac041e3c4fd57865) in parent group-v633430. [ 1096.751381] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating folder: Instances. Parent ref: group-v633707. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1096.751620] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-014fd589-3979-4669-ba4d-70776b27cea0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.760327] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created folder: Instances in parent group-v633707. [ 1096.760541] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.760716] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1096.761112] env[68569]: INFO nova.compute.manager [-] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Took 1.37 seconds to deallocate network for instance. [ 1096.761299] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ff1bc62-247c-4216-9440-615ab2fd779e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.783085] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1096.783085] env[68569]: value = "task-3167709" [ 1096.783085] env[68569]: _type = "Task" [ 1096.783085] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.790635] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167709, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.849850] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e27ffe12-e7d1-461d-810f-c3aa87fbcd4d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "ee188712-b0e0-44ee-80b4-be72da32299f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.364s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.861342] env[68569]: DEBUG nova.scheduler.client.report [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.919462] env[68569]: DEBUG nova.compute.manager [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-changed-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1096.919462] env[68569]: DEBUG nova.compute.manager [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing instance network info cache due to event network-changed-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1096.919462] env[68569]: DEBUG oslo_concurrency.lockutils [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.919462] env[68569]: DEBUG oslo_concurrency.lockutils [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.920280] env[68569]: DEBUG nova.network.neutron [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing network info cache for port b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.046503] env[68569]: DEBUG nova.compute.manager [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1097.046740] env[68569]: DEBUG nova.compute.manager [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing instance network info cache due to event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1097.046959] env[68569]: DEBUG oslo_concurrency.lockutils [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.047117] env[68569]: DEBUG oslo_concurrency.lockutils [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.047269] env[68569]: DEBUG nova.network.neutron [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.183916] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "24a99398-105c-403f-8921-939beb40d447" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.184154] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "24a99398-105c-403f-8921-939beb40d447" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.226820] env[68569]: DEBUG nova.compute.manager [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.227073] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.228196] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bc1486-8593-4785-a18e-c0eb4afdc94c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.236013] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.236245] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd10a928-bc77-400c-89e9-99d8c3383436 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.242276] env[68569]: DEBUG oslo_vmware.api [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1097.242276] env[68569]: value = "task-3167710" [ 1097.242276] env[68569]: _type = "Task" [ 1097.242276] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.249780] env[68569]: DEBUG oslo_vmware.api [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.281494] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.292580] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167709, 'name': CreateVM_Task, 'duration_secs': 0.364311} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.293365] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.293658] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.293837] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.294179] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1097.294440] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16cec00b-c6bd-49c0-aa0a-432c6cd1308e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.299288] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1097.299288] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d64fb3-0d4d-0188-d0e1-89780deaedd6" [ 1097.299288] env[68569]: _type = "Task" [ 1097.299288] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.307629] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d64fb3-0d4d-0188-d0e1-89780deaedd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.367680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.747s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.370141] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.089s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.370418] env[68569]: DEBUG nova.objects.instance [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lazy-loading 'resources' on Instance uuid fd34691f-ebe5-4b40-994c-6278e09fc9eb {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.389632] env[68569]: INFO nova.scheduler.client.report [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3 [ 1097.646674] env[68569]: DEBUG nova.network.neutron [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updated VIF entry in instance network info cache for port b8cb9bd3-0cdd-4183-b5f7-e26fe241764b. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.647365] env[68569]: DEBUG nova.network.neutron [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.686609] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1097.753130] env[68569]: DEBUG oslo_vmware.api [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167710, 'name': PowerOffVM_Task, 'duration_secs': 0.238725} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.753976] env[68569]: DEBUG nova.network.neutron [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updated VIF entry in instance network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.754364] env[68569]: DEBUG nova.network.neutron [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.755531] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.755707] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.756173] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7df0a88d-1787-4942-8d69-cc7326c6dd1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.809123] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d64fb3-0d4d-0188-d0e1-89780deaedd6, 'name': SearchDatastore_Task, 'duration_secs': 0.011924} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.809416] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.809692] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1097.809944] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.810118] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.810292] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1097.810549] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-695c1b31-2b9b-4d49-859d-1aaebd4bfcc7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.818966] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1097.819145] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1097.819875] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7379f383-ac92-42f2-a6b9-fca738d22f42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.826750] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1097.826750] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd065c-f6af-cb8e-4656-8a4331c6fbda" [ 1097.826750] env[68569]: _type = "Task" [ 1097.826750] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.834362] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd065c-f6af-cb8e-4656-8a4331c6fbda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.901204] env[68569]: DEBUG oslo_concurrency.lockutils [None req-616301ed-de2e-47de-9220-9976de6a01c1 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.016s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.044710] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942d293e-7acf-415b-a07d-a50f4f910543 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.052678] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476a16ea-c91a-4032-9021-92edabb7ace6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.082948] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c8f5cf-046e-4a09-bb06-8ec1417613e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.091030] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c98a88-652c-4174-9cdb-e20e596014b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.103676] env[68569]: DEBUG nova.compute.provider_tree [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.149777] env[68569]: DEBUG oslo_concurrency.lockutils [req-a71d387a-15da-47b9-8457-e04a05552304 req-2e4a36b3-54f2-4b06-a242-9a33a4930a25 service nova] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.207855] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.257317] env[68569]: DEBUG oslo_concurrency.lockutils [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.257738] env[68569]: DEBUG nova.compute.manager [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received event network-vif-deleted-970bdc81-4392-467c-8891-9dd4c15c0379 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1098.258010] env[68569]: DEBUG nova.compute.manager [req-8247582b-a33b-4037-b6ab-336b071d0efd req-2263e99f-c6a3-4367-9bd6-f0d2e215d4f5 service nova] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Received event network-vif-deleted-d3de7a89-6fef-43c5-8fe1-1f6e07a715ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1098.265015] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.265314] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.265552] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleting the datastore file [datastore2] 671f7e6d-703e-48a9-8509-2a8924afe911 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.265881] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e210a0d7-f78e-4db4-bdc9-8c84b5441e71 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.273179] env[68569]: DEBUG oslo_vmware.api [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1098.273179] env[68569]: value = "task-3167712" [ 1098.273179] env[68569]: _type = "Task" [ 1098.273179] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.283307] env[68569]: DEBUG oslo_vmware.api [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.337347] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dd065c-f6af-cb8e-4656-8a4331c6fbda, 'name': SearchDatastore_Task, 'duration_secs': 0.010771} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.338287] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34b8a471-c046-44f3-bf0f-4e80e9bca18a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.344070] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1098.344070] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eccffe-96b9-b762-7b35-e9332bb0e50a" [ 1098.344070] env[68569]: _type = "Task" [ 1098.344070] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.351964] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eccffe-96b9-b762-7b35-e9332bb0e50a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.608127] env[68569]: DEBUG nova.scheduler.client.report [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.784634] env[68569]: DEBUG oslo_vmware.api [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236622} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.784869] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.785054] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.785226] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.785391] env[68569]: INFO nova.compute.manager [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Took 1.56 seconds to destroy the instance on the hypervisor. [ 1098.785626] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.785805] env[68569]: DEBUG nova.compute.manager [-] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.785897] env[68569]: DEBUG nova.network.neutron [-] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.857875] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eccffe-96b9-b762-7b35-e9332bb0e50a, 'name': SearchDatastore_Task, 'duration_secs': 0.010827} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.860934] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.861312] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.861664] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6310cac-13ed-4453-99b8-dd7d1bcd1692 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.870198] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1098.870198] env[68569]: value = "task-3167713" [ 1098.870198] env[68569]: _type = "Task" [ 1098.870198] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.880958] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167713, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.069153] env[68569]: DEBUG nova.compute.manager [req-c87491e5-1424-4412-9ed0-5e12d3f44eb9 req-65b3430b-816b-4c74-bf61-aa2aa7970638 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Received event network-vif-deleted-642bbbb7-8eeb-4920-bfce-44e3f938dddb {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1099.069472] env[68569]: INFO nova.compute.manager [req-c87491e5-1424-4412-9ed0-5e12d3f44eb9 req-65b3430b-816b-4c74-bf61-aa2aa7970638 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Neutron deleted interface 642bbbb7-8eeb-4920-bfce-44e3f938dddb; detaching it from the instance and deleting it from the info cache [ 1099.070060] env[68569]: DEBUG nova.network.neutron [req-c87491e5-1424-4412-9ed0-5e12d3f44eb9 req-65b3430b-816b-4c74-bf61-aa2aa7970638 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.112143] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.114572] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.907s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.116127] env[68569]: INFO nova.compute.claims [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.132046] env[68569]: INFO nova.scheduler.client.report [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Deleted allocations for instance fd34691f-ebe5-4b40-994c-6278e09fc9eb [ 1099.182201] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "415b9040-15c3-472f-b427-c39e2c456764" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.182528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "415b9040-15c3-472f-b427-c39e2c456764" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.380751] env[68569]: DEBUG oslo_vmware.api [None 
req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167713, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.548634] env[68569]: DEBUG nova.network.neutron [-] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.573087] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb4f1c6a-8c92-48b5-a91d-fcffee944852 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.583224] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3049dfb-14fa-4a69-a2f0-01fae9f5f076 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.630577] env[68569]: DEBUG nova.compute.manager [req-c87491e5-1424-4412-9ed0-5e12d3f44eb9 req-65b3430b-816b-4c74-bf61-aa2aa7970638 service nova] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Detach interface failed, port_id=642bbbb7-8eeb-4920-bfce-44e3f938dddb, reason: Instance 671f7e6d-703e-48a9-8509-2a8924afe911 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1099.641893] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20c4a869-40fd-4d01-9738-10198360858e tempest-ServersTestMultiNic-1802507971 tempest-ServersTestMultiNic-1802507971-project-member] Lock "fd34691f-ebe5-4b40-994c-6278e09fc9eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.927s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.685008] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1099.881628] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518725} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.881885] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.882099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.882357] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2238e6d0-2ca0-4241-91f6-1da7eb167e4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.889485] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1099.889485] env[68569]: value = "task-3167714" [ 1099.889485] env[68569]: _type = "Task" [ 1099.889485] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.899529] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.052743] env[68569]: INFO nova.compute.manager [-] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Took 1.27 seconds to deallocate network for instance. 
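The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries around this point all follow the same oslo.vmware calling pattern: invoke a vCenter *_Task method, then block on the task while _poll_task logs "progress is N%". What follows is a minimal, hypothetical Python sketch of that pattern for illustration only; it is not code taken from Nova or from this run, and the vCenter host, credentials, and the vm_ref managed-object reference are placeholders.

# Hypothetical sketch of the oslo.vmware pattern behind the
# "Invoking <Something>_Task ..." and "Task: {...} progress is N%" log entries.
from oslo_vmware import api


def power_on(session, vm_ref):
    """Start a VM and block until the vCenter task finishes.

    vm_ref is a VirtualMachine managed-object reference obtained elsewhere
    (for example via the PropertyCollector queries logged above).
    """
    # invoke_api() issues the SOAP call that service.py logs as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task (the "_poll_task ... progress is N%"
    # entries) and raises if the task ends in an error state.
    return session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder connection details; constructing the session logs in to vCenter.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)
    # power_on(session, vm_ref) would then be called with a real moref.
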
[ 1100.207937] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.313716] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c5272a-7721-4f28-a0fb-9c67724e4fad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.323392] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a719fd4-a6eb-4574-ad03-87cbc045f24b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.372943] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b715d5-6aae-4ec5-b949-24c14041b4b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.383904] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6875916-0db2-49e1-a4be-2332d0448d75 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.403382] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224309} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.416114] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.416897] env[68569]: DEBUG nova.compute.provider_tree [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.422021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669d2adc-0e6f-4f50-b47b-84c94ee0d398 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.458630] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.460211] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61137088-c09c-4a01-bdc2-6b2d810d5bfe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.485361] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1100.485361] env[68569]: value = "task-3167715" [ 1100.485361] env[68569]: _type = "Task" [ 1100.485361] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.495366] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167715, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.560831] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.921214] env[68569]: DEBUG nova.scheduler.client.report [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.997243] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167715, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.426520] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.427073] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1101.429679] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.222s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.431405] env[68569]: INFO nova.compute.claims [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.501762] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167715, 'name': ReconfigVM_Task, 'duration_secs': 0.667853} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.501762] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.501762] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5cf92778-4064-4527-93d7-f877363b8892 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.507819] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1101.507819] env[68569]: value = "task-3167716" [ 1101.507819] env[68569]: _type = "Task" [ 1101.507819] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.516874] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167716, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.938672] env[68569]: DEBUG nova.compute.utils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1101.940282] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1101.940460] env[68569]: DEBUG nova.network.neutron [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1101.979360] env[68569]: DEBUG nova.policy [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868bbe891585423f85374f6dffdc7813', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62ff17f9dcc242e0aff061402e57bdcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1102.020976] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167716, 'name': Rename_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.229061] env[68569]: DEBUG nova.network.neutron [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Successfully created port: 35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.444586] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1102.521824] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167716, 'name': Rename_Task, 'duration_secs': 0.69514} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.521824] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1102.521824] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44438122-9014-4bf6-8dc1-4673c6d3d7cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.527458] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1102.527458] env[68569]: value = "task-3167717" [ 1102.527458] env[68569]: _type = "Task" [ 1102.527458] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.535926] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167717, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.629522] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff49e762-8b30-4581-b2f0-776150e1a8a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.636719] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7747c7c-18c4-4335-9e24-4fd5d5445ead {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.666571] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a4da25-8f84-44bc-a8b9-befb7a8ff300 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.674048] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87299b4-da4d-42dd-896f-9ac295020a4a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.687063] env[68569]: DEBUG nova.compute.provider_tree [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.037709] env[68569]: DEBUG oslo_vmware.api [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167717, 'name': PowerOnVM_Task, 'duration_secs': 0.467275} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.038017] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.038211] env[68569]: INFO nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Took 8.33 seconds to spawn the instance on the hypervisor. [ 1103.038429] env[68569]: DEBUG nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.039190] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fb7136-5ccf-4f2b-9419-19106f0fd145 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.190477] env[68569]: DEBUG nova.scheduler.client.report [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.455435] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1103.484835] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1103.485225] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.485274] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1103.485420] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.485564] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1103.485707] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1103.485906] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1103.486073] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1103.486236] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Got 1 
possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1103.486411] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1103.486576] env[68569]: DEBUG nova.virt.hardware [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1103.487429] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e536b27-e111-4c04-b245-8107dd93daaa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.495412] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8487829a-c049-4dd3-8220-f32376e33974 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.557418] env[68569]: INFO nova.compute.manager [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Took 13.78 seconds to build instance. [ 1103.603818] env[68569]: DEBUG nova.compute.manager [req-2eabd2e4-8aea-4e22-949a-e0b32c3afd94 req-fe8848cc-97c1-41ac-8acd-ca022ba8f39c service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Received event network-vif-plugged-35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1103.604061] env[68569]: DEBUG oslo_concurrency.lockutils [req-2eabd2e4-8aea-4e22-949a-e0b32c3afd94 req-fe8848cc-97c1-41ac-8acd-ca022ba8f39c service nova] Acquiring lock "24a99398-105c-403f-8921-939beb40d447-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.604281] env[68569]: DEBUG oslo_concurrency.lockutils [req-2eabd2e4-8aea-4e22-949a-e0b32c3afd94 req-fe8848cc-97c1-41ac-8acd-ca022ba8f39c service nova] Lock "24a99398-105c-403f-8921-939beb40d447-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.604448] env[68569]: DEBUG oslo_concurrency.lockutils [req-2eabd2e4-8aea-4e22-949a-e0b32c3afd94 req-fe8848cc-97c1-41ac-8acd-ca022ba8f39c service nova] Lock "24a99398-105c-403f-8921-939beb40d447-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.604615] env[68569]: DEBUG nova.compute.manager [req-2eabd2e4-8aea-4e22-949a-e0b32c3afd94 req-fe8848cc-97c1-41ac-8acd-ca022ba8f39c service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] No waiting events found dispatching network-vif-plugged-35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1103.604870] env[68569]: WARNING nova.compute.manager [req-2eabd2e4-8aea-4e22-949a-e0b32c3afd94 req-fe8848cc-97c1-41ac-8acd-ca022ba8f39c service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Received unexpected event network-vif-plugged-35bc7caa-5b98-48e6-92f2-3dbb6c559faf for instance with vm_state building and task_state spawning. [ 1103.691224] env[68569]: DEBUG nova.network.neutron [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Successfully updated port: 35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1103.695317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.695785] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1103.698111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.137s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.698331] env[68569]: DEBUG nova.objects.instance [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lazy-loading 'resources' on Instance uuid 671f7e6d-703e-48a9-8509-2a8924afe911 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.059734] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12d6cb4f-70a9-4c55-958b-05de8a64a7ce tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.285s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.202055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "refresh_cache-24a99398-105c-403f-8921-939beb40d447" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.202055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "refresh_cache-24a99398-105c-403f-8921-939beb40d447" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.202055] env[68569]: DEBUG 
nova.network.neutron [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.202897] env[68569]: DEBUG nova.compute.utils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.207519] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.207825] env[68569]: DEBUG nova.network.neutron [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.248533] env[68569]: DEBUG nova.policy [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.352664] env[68569]: DEBUG nova.compute.manager [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1104.352664] env[68569]: DEBUG nova.compute.manager [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing instance network info cache due to event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1104.352842] env[68569]: DEBUG oslo_concurrency.lockutils [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.353108] env[68569]: DEBUG oslo_concurrency.lockutils [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.353189] env[68569]: DEBUG nova.network.neutron [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.380048] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c59f45b-21c7-4534-ac2f-da041522b46b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.387885] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edf9e17-16a3-4471-93e2-f78ff33a38c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.418808] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7e1785-3345-4872-af5a-71d4cf6f16f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.425992] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa081799-f1b6-4c78-bacb-15aa74edd7b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.438850] env[68569]: DEBUG nova.compute.provider_tree [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.506591] env[68569]: DEBUG nova.network.neutron [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Successfully created port: 91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.710251] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1104.742244] env[68569]: DEBUG nova.network.neutron [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1104.885554] env[68569]: DEBUG nova.network.neutron [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Updating instance_info_cache with network_info: [{"id": "35bc7caa-5b98-48e6-92f2-3dbb6c559faf", "address": "fa:16:3e:cc:7a:3a", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35bc7caa-5b", "ovs_interfaceid": "35bc7caa-5b98-48e6-92f2-3dbb6c559faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.941640] env[68569]: DEBUG nova.scheduler.client.report [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.071983] env[68569]: DEBUG nova.network.neutron [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updated VIF entry in instance network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1105.072444] env[68569]: DEBUG nova.network.neutron [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.388147] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "refresh_cache-24a99398-105c-403f-8921-939beb40d447" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.388453] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Instance network_info: |[{"id": "35bc7caa-5b98-48e6-92f2-3dbb6c559faf", "address": "fa:16:3e:cc:7a:3a", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35bc7caa-5b", "ovs_interfaceid": "35bc7caa-5b98-48e6-92f2-3dbb6c559faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1105.388880] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:7a:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35bc7caa-5b98-48e6-92f2-3dbb6c559faf', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.396500] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.396700] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24a99398-105c-403f-8921-939beb40d447] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.396921] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa493339-b5c7-4626-98d2-6cb3a8db8f03 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.417422] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.417422] env[68569]: value = "task-3167718" [ 1105.417422] env[68569]: _type = "Task" [ 1105.417422] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.424745] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167718, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.446562] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.470313] env[68569]: INFO nova.scheduler.client.report [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted allocations for instance 671f7e6d-703e-48a9-8509-2a8924afe911 [ 1105.575189] env[68569]: DEBUG oslo_concurrency.lockutils [req-409df5b0-d1ce-48ab-bebe-d4145c5acf9f req-1b56d355-0906-41c8-aa05-6c8f9cd55b0a service nova] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.629523] env[68569]: DEBUG nova.compute.manager [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Received event network-changed-35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1105.629784] env[68569]: DEBUG nova.compute.manager [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Refreshing instance network info cache due to event network-changed-35bc7caa-5b98-48e6-92f2-3dbb6c559faf. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1105.630017] env[68569]: DEBUG oslo_concurrency.lockutils [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] Acquiring lock "refresh_cache-24a99398-105c-403f-8921-939beb40d447" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.630241] env[68569]: DEBUG oslo_concurrency.lockutils [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] Acquired lock "refresh_cache-24a99398-105c-403f-8921-939beb40d447" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.630430] env[68569]: DEBUG nova.network.neutron [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Refreshing network info cache for port 35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.719690] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1105.746176] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.746351] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.746549] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.746739] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.746885] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.747056] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.747273] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.747431] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.747594] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.747751] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.747920] env[68569]: DEBUG nova.virt.hardware [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.748833] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0569c040-171a-4997-a7ab-b797451f9ddc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.756964] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdbf601-44ac-4f6c-8a03-0a1675675a2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.927628] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167718, 'name': CreateVM_Task, 'duration_secs': 0.400677} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.927798] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24a99398-105c-403f-8921-939beb40d447] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.928568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.928729] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.930093] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1105.930093] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da57f417-0836-4dc3-abdd-aac64f683a7f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.933751] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1105.933751] 
env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f15b3a-dab7-7dcc-edfb-75b52bb46fe8" [ 1105.933751] env[68569]: _type = "Task" [ 1105.933751] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.941189] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f15b3a-dab7-7dcc-edfb-75b52bb46fe8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.952121] env[68569]: DEBUG nova.network.neutron [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Successfully updated port: 91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1105.978061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1aff2331-c0ea-4dd2-bd25-10d08d2bc723 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "671f7e6d-703e-48a9-8509-2a8924afe911" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.257s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.309323] env[68569]: DEBUG nova.network.neutron [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Updated VIF entry in instance network info cache for port 35bc7caa-5b98-48e6-92f2-3dbb6c559faf. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.309746] env[68569]: DEBUG nova.network.neutron [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Updating instance_info_cache with network_info: [{"id": "35bc7caa-5b98-48e6-92f2-3dbb6c559faf", "address": "fa:16:3e:cc:7a:3a", "network": {"id": "bf7e9923-9223-4b14-8176-d3910d2b1c90", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1728039945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62ff17f9dcc242e0aff061402e57bdcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35bc7caa-5b", "ovs_interfaceid": "35bc7caa-5b98-48e6-92f2-3dbb6c559faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.444259] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f15b3a-dab7-7dcc-edfb-75b52bb46fe8, 'name': SearchDatastore_Task, 'duration_secs': 0.010369} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.444622] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.444885] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.445237] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.445410] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.445596] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.445956] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e850f574-867a-4d13-afda-4346d9e7e086 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.454726] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-415b9040-15c3-472f-b427-c39e2c456764" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.454963] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-415b9040-15c3-472f-b427-c39e2c456764" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.455185] env[68569]: DEBUG nova.network.neutron [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1106.460963] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 
tempest-ImagesTestJSON-885804793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.461153] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.461892] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e221d78b-8e13-4b04-a1e3-bfd792c9f971 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.468070] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1106.468070] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52871843-9d1b-1869-b39a-d43e1fc67cf0" [ 1106.468070] env[68569]: _type = "Task" [ 1106.468070] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.476146] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52871843-9d1b-1869-b39a-d43e1fc67cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.812732] env[68569]: DEBUG oslo_concurrency.lockutils [req-6f0edec4-695e-4a23-9fbc-70aba8001cb4 req-89468f6f-563e-465c-b36c-9f65db8bb3c6 service nova] Releasing lock "refresh_cache-24a99398-105c-403f-8921-939beb40d447" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.980234] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52871843-9d1b-1869-b39a-d43e1fc67cf0, 'name': SearchDatastore_Task, 'duration_secs': 0.011075} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.981059] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0593d55a-dd35-41fb-857b-6376441044bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.985465] env[68569]: DEBUG nova.network.neutron [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1106.990041] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1106.990041] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52766bd7-ff11-c13f-d8c8-7073115edc9c" [ 1106.990041] env[68569]: _type = "Task" [ 1106.990041] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.000869] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52766bd7-ff11-c13f-d8c8-7073115edc9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.104985] env[68569]: DEBUG nova.network.neutron [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Updating instance_info_cache with network_info: [{"id": "91fa33f6-41cd-4b38-8be4-73b5663b2537", "address": "fa:16:3e:56:ac:fd", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91fa33f6-41", "ovs_interfaceid": "91fa33f6-41cd-4b38-8be4-73b5663b2537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.500255] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52766bd7-ff11-c13f-d8c8-7073115edc9c, 'name': SearchDatastore_Task, 'duration_secs': 0.031171} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.500539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.500762] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 24a99398-105c-403f-8921-939beb40d447/24a99398-105c-403f-8921-939beb40d447.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.501030] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36d047d4-7746-4a63-b4d4-dea60888c1da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.507262] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1107.507262] env[68569]: value = "task-3167719" [ 1107.507262] env[68569]: _type = "Task" [ 1107.507262] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.515050] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167719, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.608192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-415b9040-15c3-472f-b427-c39e2c456764" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.608618] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Instance network_info: |[{"id": "91fa33f6-41cd-4b38-8be4-73b5663b2537", "address": "fa:16:3e:56:ac:fd", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91fa33f6-41", "ovs_interfaceid": "91fa33f6-41cd-4b38-8be4-73b5663b2537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1107.609195] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:ac:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91fa33f6-41cd-4b38-8be4-73b5663b2537', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.617834] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.618091] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1107.618383] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d3fb8b6-d53a-4262-89b8-e815780cf292 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.640310] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.640310] env[68569]: value = "task-3167720" [ 1107.640310] env[68569]: _type = "Task" [ 1107.640310] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.649360] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167720, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.657010] env[68569]: DEBUG nova.compute.manager [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Received event network-vif-plugged-91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1107.657342] env[68569]: DEBUG oslo_concurrency.lockutils [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] Acquiring lock "415b9040-15c3-472f-b427-c39e2c456764-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.657456] env[68569]: DEBUG oslo_concurrency.lockutils [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] Lock "415b9040-15c3-472f-b427-c39e2c456764-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.657628] env[68569]: DEBUG oslo_concurrency.lockutils [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] Lock "415b9040-15c3-472f-b427-c39e2c456764-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.657794] env[68569]: DEBUG nova.compute.manager [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] No waiting events found dispatching network-vif-plugged-91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1107.657952] env[68569]: WARNING nova.compute.manager [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Received unexpected event network-vif-plugged-91fa33f6-41cd-4b38-8be4-73b5663b2537 for instance with vm_state building and task_state spawning. 
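Editor's note: the surrounding entries repeat one pattern over and over: nova invokes a vCenter task through oslo.vmware (SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task), then blocks in wait_for_task while _poll_task reports "progress is 0%" until the task "completed successfully". The snippet below is a minimal, dependency-free Python sketch of that polling loop, not the actual oslo.vmware implementation; poll_task, interval and timeout are hypothetical stand-ins for the real API.

import time

def wait_for_task(poll_task, interval=0.5, timeout=300.0):
    """Illustrative polling loop for a vCenter-style task.

    poll_task is a hypothetical callable returning (state, progress, error),
    standing in for the PropertyCollector reads the log shows _poll_task doing.
    Real code also handles transient faults and session re-login, omitted here.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = poll_task()
        if state == "success":        # log: "... completed successfully."
            return
        if state == "error":
            raise RuntimeError(f"task failed: {error}")
        # log: "Task: {...} progress is 0%." repeated until completion
        time.sleep(interval)
    raise TimeoutError(f"task did not complete within {timeout:.0f}s")
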
[ 1107.658120] env[68569]: DEBUG nova.compute.manager [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Received event network-changed-91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1107.658271] env[68569]: DEBUG nova.compute.manager [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Refreshing instance network info cache due to event network-changed-91fa33f6-41cd-4b38-8be4-73b5663b2537. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1107.658523] env[68569]: DEBUG oslo_concurrency.lockutils [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] Acquiring lock "refresh_cache-415b9040-15c3-472f-b427-c39e2c456764" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.658661] env[68569]: DEBUG oslo_concurrency.lockutils [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] Acquired lock "refresh_cache-415b9040-15c3-472f-b427-c39e2c456764" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.658814] env[68569]: DEBUG nova.network.neutron [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Refreshing network info cache for port 91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.017957] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167719, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.422613} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.018524] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 24a99398-105c-403f-8921-939beb40d447/24a99398-105c-403f-8921-939beb40d447.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1108.018660] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1108.018824] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b87aa272-1e35-482a-81eb-62efecf57090 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.025237] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1108.025237] env[68569]: value = "task-3167721" [ 1108.025237] env[68569]: _type = "Task" [ 1108.025237] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.032908] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.150074] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167720, 'name': CreateVM_Task, 'duration_secs': 0.505352} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.150292] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.150890] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.151072] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.151393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1108.151656] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7d58e0f-d929-4398-968f-601b8722464d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.155937] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1108.155937] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5243d220-2bbc-0261-991e-6a43efc060b9" [ 1108.155937] env[68569]: _type = "Task" [ 1108.155937] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.164935] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5243d220-2bbc-0261-991e-6a43efc060b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.537103] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.279546} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.537434] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.538131] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9691c8da-a42b-40e3-919b-8f992fb82e7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.560739] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 24a99398-105c-403f-8921-939beb40d447/24a99398-105c-403f-8921-939beb40d447.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.561027] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0297f40-bb87-4432-b545-a1acf3f9e7d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.576942] env[68569]: DEBUG nova.network.neutron [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Updated VIF entry in instance network info cache for port 91fa33f6-41cd-4b38-8be4-73b5663b2537. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1108.577271] env[68569]: DEBUG nova.network.neutron [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Updating instance_info_cache with network_info: [{"id": "91fa33f6-41cd-4b38-8be4-73b5663b2537", "address": "fa:16:3e:56:ac:fd", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91fa33f6-41", "ovs_interfaceid": "91fa33f6-41cd-4b38-8be4-73b5663b2537", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.584873] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1108.584873] env[68569]: value = "task-3167722" [ 1108.584873] env[68569]: _type = "Task" [ 1108.584873] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.593827] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.665978] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5243d220-2bbc-0261-991e-6a43efc060b9, 'name': SearchDatastore_Task, 'duration_secs': 0.007699} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.666288] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.666516] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.666747] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.666891] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.667077] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.667330] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51e29bc9-693e-4ff7-9b7a-a6913f2a439e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.675875] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.676052] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.676717] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e89aead-9a55-4438-b11f-83e939e5b105 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.682261] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1108.682261] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea8017-c3f0-2fb4-80a7-277fc48c9a79" [ 1108.682261] env[68569]: _type = "Task" [ 1108.682261] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.690655] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea8017-c3f0-2fb4-80a7-277fc48c9a79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.080362] env[68569]: DEBUG oslo_concurrency.lockutils [req-4d4a7a59-c52a-4786-97ae-bde6945b8618 req-c2282767-c866-4b27-ba69-ede651414abe service nova] Releasing lock "refresh_cache-415b9040-15c3-472f-b427-c39e2c456764" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.095511] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.195355] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea8017-c3f0-2fb4-80a7-277fc48c9a79, 'name': SearchDatastore_Task, 'duration_secs': 0.008498} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.196564] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7e94102-5c1c-4fc7-8c99-73fae8381a5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.204545] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1109.204545] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b59d4c-b671-a026-33f4-b4349e1b311e" [ 1109.204545] env[68569]: _type = "Task" [ 1109.204545] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.214929] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b59d4c-b671-a026-33f4-b4349e1b311e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.596422] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167722, 'name': ReconfigVM_Task, 'duration_secs': 0.55753} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.596734] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 24a99398-105c-403f-8921-939beb40d447/24a99398-105c-403f-8921-939beb40d447.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.597294] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fa5cc36-85ca-4bd9-bb41-c69b7657f5ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.604907] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1109.604907] env[68569]: value = "task-3167723" [ 1109.604907] env[68569]: _type = "Task" [ 1109.604907] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.614716] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167723, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.715787] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b59d4c-b671-a026-33f4-b4349e1b311e, 'name': SearchDatastore_Task, 'duration_secs': 0.010246} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.716065] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.716320] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 415b9040-15c3-472f-b427-c39e2c456764/415b9040-15c3-472f-b427-c39e2c456764.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1109.716607] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-584d60ab-0033-4a71-8374-b890a8fca4b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.725145] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1109.725145] env[68569]: value = "task-3167724" [ 1109.725145] env[68569]: _type = "Task" [ 1109.725145] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.733406] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167724, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.115969] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167723, 'name': Rename_Task, 'duration_secs': 0.145317} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.116332] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.116599] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37d59f61-5ebb-43ea-9902-83e6de6cebf5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.125909] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1110.125909] env[68569]: value = "task-3167725" [ 1110.125909] env[68569]: _type = "Task" [ 1110.125909] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.137918] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167725, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.235536] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167724, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.636373] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167725, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.737763] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167724, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54045} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.738063] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 415b9040-15c3-472f-b427-c39e2c456764/415b9040-15c3-472f-b427-c39e2c456764.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1110.738222] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1110.738497] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f79e8313-40c1-4fa6-9e5d-3f0754aa658a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.746230] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1110.746230] env[68569]: value = "task-3167726" [ 1110.746230] env[68569]: _type = "Task" [ 1110.746230] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.754291] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167726, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.136839] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167725, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.256912] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074157} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.257177] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1111.257949] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c32e3c6-e712-4518-90d1-dc1b080c323e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.280279] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 415b9040-15c3-472f-b427-c39e2c456764/415b9040-15c3-472f-b427-c39e2c456764.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1111.280536] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5968dd5-b801-40f1-ac8f-1867a1c04a66 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.300934] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1111.300934] env[68569]: value = "task-3167727" [ 1111.300934] env[68569]: _type = "Task" [ 1111.300934] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.310567] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167727, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.640130] env[68569]: DEBUG oslo_vmware.api [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167725, 'name': PowerOnVM_Task, 'duration_secs': 1.04915} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.640502] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.640761] env[68569]: INFO nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Took 8.19 seconds to spawn the instance on the hypervisor. [ 1111.640939] env[68569]: DEBUG nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1111.641909] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4addc3-e437-48b8-846c-8c068c260d16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.811017] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167727, 'name': ReconfigVM_Task, 'duration_secs': 0.37499} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.811318] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 415b9040-15c3-472f-b427-c39e2c456764/415b9040-15c3-472f-b427-c39e2c456764.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1111.811961] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de9c06c1-3126-4fb3-b4e5-e6929c6c5f0b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.822166] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1111.822166] env[68569]: value = "task-3167728" [ 1111.822166] env[68569]: _type = "Task" [ 1111.822166] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.830833] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167728, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.159322] env[68569]: INFO nova.compute.manager [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Took 13.97 seconds to build instance. [ 1112.333823] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167728, 'name': Rename_Task, 'duration_secs': 0.327264} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.334128] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1112.334392] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8203d8a9-a12b-4295-b76f-5775d004f01b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.341914] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1112.341914] env[68569]: value = "task-3167729" [ 1112.341914] env[68569]: _type = "Task" [ 1112.341914] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.350623] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167729, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.493876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.494010] env[68569]: DEBUG oslo_concurrency.lockutils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.661539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f3470013-8585-4353-b7fc-3edbbb2ec9fb tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "24a99398-105c-403f-8921-939beb40d447" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.477s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.853456] env[68569]: DEBUG oslo_vmware.api [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167729, 'name': PowerOnVM_Task, 'duration_secs': 0.505033} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.853859] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.854177] env[68569]: INFO nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Took 7.13 seconds to spawn the instance on the hypervisor. 
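Editor's note: taken together, the entries above trace the spawn path for instance 415b9040-15c3-472f-b427-c39e2c456764: serialize access to the devstack-image-cache_base copy of image cfcf6154-fe87-45d3-9aaf-2d3604c95629 under a lockutils lock, copy the cached VMDK into the instance directory, extend the root disk, reconfigure the VM to attach it, rename, and power on. The sketch below illustrates that orchestration under a per-image lock; the helper callables are hypothetical placeholders for the vm_util/vmops steps seen in the log, and a plain threading lock stands in for oslo_concurrency.lockutils.

import threading
from collections import defaultdict

# One lock per cached image, mirroring the
# "[datastore2] devstack-image-cache_base/<image>/<image>.vmdk" locks in the log.
_image_locks = defaultdict(threading.Lock)

def spawn_from_image_cache(image_id, instance_uuid, copy_cached_image,
                           extend_root_disk, attach_disk, power_on):
    """Hypothetical sketch of the spawn sequence the log records.

    The four callables stand in for CopyVirtualDisk_Task, ExtendVirtualDisk_Task,
    ReconfigVM_Task (plus Rename_Task) and PowerOnVM_Task respectively.
    """
    cache_key = f"devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    with _image_locks[cache_key]:
        # Copy the cached VMDK to the instance directory while holding the
        # per-image lock, so concurrent builds do not race on the cache entry.
        instance_vmdk = copy_cached_image(image_id, instance_uuid)
    # The remaining steps operate only on the instance's own copy.
    extend_root_disk(instance_vmdk)
    attach_disk(instance_uuid, instance_vmdk)
    power_on(instance_uuid)
    return instance_vmdk
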
[ 1112.854454] env[68569]: DEBUG nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.855639] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd56b18a-9aa9-427f-a159-ee0267fcaed0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.997614] env[68569]: DEBUG nova.compute.utils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1113.375106] env[68569]: INFO nova.compute.manager [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Took 13.18 seconds to build instance. [ 1113.381023] env[68569]: DEBUG nova.compute.manager [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.381765] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97eb1261-ce02-4b1d-989b-ca1e12d50fea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.500811] env[68569]: DEBUG oslo_concurrency.lockutils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.877529] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c1821e74-73b9-4c60-b0e8-6454f8c66742 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "415b9040-15c3-472f-b427-c39e2c456764" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.695s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.892924] env[68569]: INFO nova.compute.manager [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] instance snapshotting [ 1113.895692] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcc0c85-c623-4ffe-9c71-14bb86f7b12c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.916105] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfa20fb-9fb7-4ad6-ad0b-01c2c6240264 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.427540] env[68569]: DEBUG 
nova.virt.vmwareapi.vmops [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1114.427809] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b9c50299-9727-4ce7-b0ac-6a115f5458e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.438913] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1114.438913] env[68569]: value = "task-3167730" [ 1114.438913] env[68569]: _type = "Task" [ 1114.438913] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.446918] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167730, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.568191] env[68569]: DEBUG oslo_concurrency.lockutils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.568522] env[68569]: DEBUG oslo_concurrency.lockutils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.568752] env[68569]: INFO nova.compute.manager [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Attaching volume e7b28a43-bb29-4633-ac1f-9b8194229203 to /dev/sdb [ 1114.598144] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f50bcc-f726-4c25-8e6d-e81074be1aa6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.605707] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387302ca-6b42-4290-8407-ca6152e38c8c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.620215] env[68569]: DEBUG nova.virt.block_device [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updating existing volume attachment record: c3c12189-7623-48bf-852e-d3b78f870a3a {{(pid=68569) _volume_attach 
/opt/stack/nova/nova/virt/block_device.py:666}} [ 1114.705835] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "415b9040-15c3-472f-b427-c39e2c456764" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.706134] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "415b9040-15c3-472f-b427-c39e2c456764" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.706435] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "415b9040-15c3-472f-b427-c39e2c456764-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.706625] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "415b9040-15c3-472f-b427-c39e2c456764-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.706795] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "415b9040-15c3-472f-b427-c39e2c456764-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.709131] env[68569]: INFO nova.compute.manager [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Terminating instance [ 1114.949026] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167730, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.214341] env[68569]: DEBUG nova.compute.manager [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1115.214501] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.215668] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a742c7-80d2-40e3-97a3-b9e0f8864ecf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.226522] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.227494] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a48a737-86f1-421a-b400-41f560e066b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.235141] env[68569]: DEBUG oslo_vmware.api [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1115.235141] env[68569]: value = "task-3167732" [ 1115.235141] env[68569]: _type = "Task" [ 1115.235141] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.244850] env[68569]: DEBUG oslo_vmware.api [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.450517] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167730, 'name': CreateSnapshot_Task, 'duration_secs': 0.526023} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.450876] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1115.451641] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7216cefc-7370-4285-87a4-ce6709b51020 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.716198] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a7145443-aacb-4d9e-8e39-3741d0630849" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.716295] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a7145443-aacb-4d9e-8e39-3741d0630849" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.716731] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a7145443-aacb-4d9e-8e39-3741d0630849-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.716801] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a7145443-aacb-4d9e-8e39-3741d0630849-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.716984] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a7145443-aacb-4d9e-8e39-3741d0630849-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.719453] env[68569]: INFO nova.compute.manager [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Terminating instance [ 1115.746158] env[68569]: DEBUG oslo_vmware.api [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167732, 'name': PowerOffVM_Task, 
'duration_secs': 0.187125} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.746451] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.746628] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.746867] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-389f6eca-3bf0-4feb-958a-fab00aa843e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.840942] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.841212] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.841401] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore2] 415b9040-15c3-472f-b427-c39e2c456764 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.841685] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86527c93-0aff-43a6-8111-0f41439a3224 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.849973] env[68569]: DEBUG oslo_vmware.api [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1115.849973] env[68569]: value = "task-3167734" [ 1115.849973] env[68569]: _type = "Task" [ 1115.849973] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.857896] env[68569]: DEBUG oslo_vmware.api [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167734, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.969027] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1115.969430] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-38fa88d2-e38b-4675-8ef6-d7caf227241c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.980302] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1115.980302] env[68569]: value = "task-3167735" [ 1115.980302] env[68569]: _type = "Task" [ 1115.980302] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.990673] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167735, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.222953] env[68569]: DEBUG nova.compute.manager [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1116.223152] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1116.223987] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc51a42-22bd-4b2a-ac68-6257cde9fe01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.232207] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.232464] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-046f3c5d-b0e4-4093-b524-bcf987a15358 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.239423] env[68569]: DEBUG oslo_vmware.api [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1116.239423] env[68569]: value = "task-3167736" [ 1116.239423] env[68569]: _type = "Task" [ 1116.239423] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.248448] env[68569]: DEBUG oslo_vmware.api [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167736, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.361143] env[68569]: DEBUG oslo_vmware.api [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302175} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.361521] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.361739] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.361954] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.362155] env[68569]: INFO nova.compute.manager [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1116.362430] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.362662] env[68569]: DEBUG nova.compute.manager [-] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1116.362801] env[68569]: DEBUG nova.network.neutron [-] [instance: 415b9040-15c3-472f-b427-c39e2c456764] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1116.491211] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167735, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.630856] env[68569]: DEBUG nova.compute.manager [req-153c0968-d307-4b0f-82f4-5cab1decc4e3 req-b10be0d3-00b9-49d0-adc2-c0073789db71 service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Received event network-vif-deleted-91fa33f6-41cd-4b38-8be4-73b5663b2537 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1116.630856] env[68569]: INFO nova.compute.manager [req-153c0968-d307-4b0f-82f4-5cab1decc4e3 req-b10be0d3-00b9-49d0-adc2-c0073789db71 service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Neutron deleted interface 91fa33f6-41cd-4b38-8be4-73b5663b2537; detaching it from the instance and deleting it from the info cache [ 1116.631018] env[68569]: DEBUG nova.network.neutron [req-153c0968-d307-4b0f-82f4-5cab1decc4e3 req-b10be0d3-00b9-49d0-adc2-c0073789db71 service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.750575] env[68569]: DEBUG oslo_vmware.api [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167736, 'name': PowerOffVM_Task, 'duration_secs': 0.208891} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.750823] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.751050] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1116.751324] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d785a65-c907-472a-8d69-1f23d6a5cb27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.817029] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1116.817029] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1116.817029] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleting the datastore file [datastore1] a7145443-aacb-4d9e-8e39-3741d0630849 
{{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1116.817029] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-750b5735-beb6-4278-b327-0111052a4b05 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.825040] env[68569]: DEBUG oslo_vmware.api [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1116.825040] env[68569]: value = "task-3167738" [ 1116.825040] env[68569]: _type = "Task" [ 1116.825040] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.835249] env[68569]: DEBUG oslo_vmware.api [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167738, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.992241] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167735, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.112087] env[68569]: DEBUG nova.network.neutron [-] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.133938] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92a704b4-574c-4f87-bb9f-5d3027f67f94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.144532] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aff49b-f043-4a52-837e-bde7b4b95757 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.180480] env[68569]: DEBUG nova.compute.manager [req-153c0968-d307-4b0f-82f4-5cab1decc4e3 req-b10be0d3-00b9-49d0-adc2-c0073789db71 service nova] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Detach interface failed, port_id=91fa33f6-41cd-4b38-8be4-73b5663b2537, reason: Instance 415b9040-15c3-472f-b427-c39e2c456764 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1117.335630] env[68569]: DEBUG oslo_vmware.api [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130265} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.335933] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.336218] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1117.336420] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1117.336598] env[68569]: INFO nova.compute.manager [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1117.336834] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.337035] env[68569]: DEBUG nova.compute.manager [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1117.337129] env[68569]: DEBUG nova.network.neutron [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1117.494940] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167735, 'name': CloneVM_Task} progress is 95%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.615363] env[68569]: INFO nova.compute.manager [-] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Took 1.25 seconds to deallocate network for instance. [ 1117.993904] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167735, 'name': CloneVM_Task, 'duration_secs': 1.819495} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.994244] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Created linked-clone VM from snapshot [ 1117.994977] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47721e94-fc01-4b86-ae89-c73fbd8698d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.002838] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Uploading image facc9c3e-0928-4e30-a844-e55bbbb5de95 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1118.015253] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1118.015525] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-01c3e4b5-414b-4e9c-bb8a-213e33a55bd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.023468] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1118.023468] env[68569]: value = "task-3167740" [ 1118.023468] env[68569]: _type = "Task" [ 1118.023468] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.031330] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167740, 'name': Destroy_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.121744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.122067] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.122303] env[68569]: DEBUG nova.objects.instance [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid 415b9040-15c3-472f-b427-c39e2c456764 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.237117] env[68569]: DEBUG nova.network.neutron [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.534263] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167740, 'name': Destroy_Task, 'duration_secs': 0.306341} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.534570] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Destroyed the VM [ 1118.534810] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1118.535071] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-00367553-522f-432c-9d01-200f8f60e965 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.542522] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1118.542522] env[68569]: value = "task-3167741" [ 1118.542522] env[68569]: _type = "Task" [ 1118.542522] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.550889] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167741, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.657176] env[68569]: DEBUG nova.compute.manager [req-f29798e6-6d0a-4ac7-bdd1-07a59d8ee283 req-85a061d9-a82e-41f6-a3ff-0821f47dbfec service nova] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Received event network-vif-deleted-82d84faa-d446-4ed1-b1c1-65399875d3f2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1118.739696] env[68569]: INFO nova.compute.manager [-] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Took 1.40 seconds to deallocate network for instance. [ 1118.780545] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f33d92b-9538-4166-8ec6-624f6b589835 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.789353] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77887b3-dc9d-47f3-9c9a-ada3c65df451 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.821332] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ec7920-0f33-46d7-9dcd-3da2dec64bef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.829075] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3380098-e951-4068-a767-de1d8eb33bc9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.847009] env[68569]: DEBUG nova.compute.provider_tree [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.054136] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167741, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.166009] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1119.166374] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633712', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'name': 'volume-e7b28a43-bb29-4633-ac1f-9b8194229203', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '24bcffcc-6da1-4ae5-b802-88e9364eaf0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'serial': 'e7b28a43-bb29-4633-ac1f-9b8194229203'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1119.167529] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf389cf-f9a3-4b9a-8738-1c3882070b55 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.185924] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dade79c-30d3-43cd-a3b7-59ce5131b2d2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.210845] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] volume-e7b28a43-bb29-4633-ac1f-9b8194229203/volume-e7b28a43-bb29-4633-ac1f-9b8194229203.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1119.211172] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c145b2c-1472-4f73-b1a6-6101bc507b6f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.229066] env[68569]: DEBUG oslo_vmware.api [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1119.229066] env[68569]: value = "task-3167742" [ 1119.229066] env[68569]: _type = "Task" [ 1119.229066] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.238458] env[68569]: DEBUG oslo_vmware.api [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167742, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.246545] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.350179] env[68569]: DEBUG nova.scheduler.client.report [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.555997] env[68569]: DEBUG oslo_vmware.api [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167741, 'name': RemoveSnapshot_Task, 'duration_secs': 0.660327} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.556304] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1119.739433] env[68569]: DEBUG oslo_vmware.api [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167742, 'name': ReconfigVM_Task, 'duration_secs': 0.371204} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.739719] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Reconfigured VM instance instance-00000063 to attach disk [datastore2] volume-e7b28a43-bb29-4633-ac1f-9b8194229203/volume-e7b28a43-bb29-4633-ac1f-9b8194229203.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1119.744347] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0db0ff66-38c7-4586-8701-f49fd6f99865 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.760346] env[68569]: DEBUG oslo_vmware.api [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1119.760346] env[68569]: value = "task-3167743" [ 1119.760346] env[68569]: _type = "Task" [ 1119.760346] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.768910] env[68569]: DEBUG oslo_vmware.api [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167743, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.855537] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.858282] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.612s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.858604] env[68569]: DEBUG nova.objects.instance [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'resources' on Instance uuid a7145443-aacb-4d9e-8e39-3741d0630849 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.879081] env[68569]: INFO nova.scheduler.client.report [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance 415b9040-15c3-472f-b427-c39e2c456764 [ 1120.060869] env[68569]: WARNING nova.compute.manager [None req-7b04f37c-dfe6-4c97-9992-95b075b91859 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Image not found during snapshot: nova.exception.ImageNotFound: Image facc9c3e-0928-4e30-a844-e55bbbb5de95 could 
not be found. [ 1120.270679] env[68569]: DEBUG oslo_vmware.api [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167743, 'name': ReconfigVM_Task, 'duration_secs': 0.157418} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.271008] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633712', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'name': 'volume-e7b28a43-bb29-4633-ac1f-9b8194229203', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '24bcffcc-6da1-4ae5-b802-88e9364eaf0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'serial': 'e7b28a43-bb29-4633-ac1f-9b8194229203'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1120.386021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-73c8a83f-4192-40a5-87cf-224f4c78d47a tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "415b9040-15c3-472f-b427-c39e2c456764" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.680s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.502517] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efebf7c-12e4-4ad1-b0ee-2e20caa8a661 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.511533] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3bc05a-8944-4347-84c5-9be7392c4494 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.548117] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "24a99398-105c-403f-8921-939beb40d447" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.548388] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "24a99398-105c-403f-8921-939beb40d447" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.548656] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "24a99398-105c-403f-8921-939beb40d447-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.548845] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "24a99398-105c-403f-8921-939beb40d447-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.549014] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "24a99398-105c-403f-8921-939beb40d447-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.551038] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad471ef-6d78-44e1-b4df-c9a0efdd20ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.554620] env[68569]: INFO nova.compute.manager [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Terminating instance [ 1120.561870] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedbd0c5-4ab5-451d-8d1a-b866952d6186 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.576939] env[68569]: DEBUG nova.compute.provider_tree [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.058116] env[68569]: DEBUG nova.compute.manager [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1121.058391] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1121.059297] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f434f77c-07cd-4b83-9525-049042ca1376 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.067845] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.068348] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffe778a0-a177-4488-a495-26f76335d527 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.074720] env[68569]: DEBUG oslo_vmware.api [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1121.074720] env[68569]: value = "task-3167744" [ 1121.074720] env[68569]: _type = "Task" [ 1121.074720] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.085514] env[68569]: DEBUG nova.scheduler.client.report [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.088294] env[68569]: DEBUG oslo_vmware.api [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.310770] env[68569]: DEBUG nova.objects.instance [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'flavor' on Instance uuid 24bcffcc-6da1-4ae5-b802-88e9364eaf0e {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.585627] env[68569]: DEBUG oslo_vmware.api [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167744, 'name': PowerOffVM_Task, 'duration_secs': 0.17653} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.585888] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.586070] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1121.586345] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-699666ee-00f0-403a-86f2-087064958736 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.589912] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.609874] env[68569]: INFO nova.scheduler.client.report [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted allocations for instance a7145443-aacb-4d9e-8e39-3741d0630849 [ 1121.657725] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1121.657968] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1121.658241] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleting the datastore file [datastore2] 24a99398-105c-403f-8921-939beb40d447 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.658571] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a14cfbf-36c9-4bdc-bde3-b03caecfd946 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.665715] env[68569]: DEBUG oslo_vmware.api [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for the task: (returnval){ [ 1121.665715] env[68569]: value = "task-3167746" [ 1121.665715] env[68569]: _type = "Task" [ 1121.665715] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.676060] env[68569]: DEBUG oslo_vmware.api [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167746, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.709215] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "fca05228-2a17-4a7e-94a0-449ba74a8933" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.711790] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.816586] env[68569]: DEBUG oslo_concurrency.lockutils [None req-06d0d125-1b62-4728-8ca3-cbbaa495a0e8 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.048694] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.048815] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.117659] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c6094f49-7228-45b3-975f-bdb70f8c7fd1 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a7145443-aacb-4d9e-8e39-3741d0630849" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.401s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.175844] env[68569]: DEBUG oslo_vmware.api [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Task: {'id': task-3167746, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139251} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.176123] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.176314] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.176488] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.176658] env[68569]: INFO nova.compute.manager [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] [instance: 24a99398-105c-403f-8921-939beb40d447] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1122.176893] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1122.177086] env[68569]: DEBUG nova.compute.manager [-] [instance: 24a99398-105c-403f-8921-939beb40d447] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1122.177182] env[68569]: DEBUG nova.network.neutron [-] [instance: 24a99398-105c-403f-8921-939beb40d447] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1122.213990] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1122.416485] env[68569]: DEBUG nova.compute.manager [req-1f7327f6-c897-4a94-8c14-08ac558a1b57 req-dbf778ec-e6e9-4b62-97a8-eaa65d050027 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Received event network-vif-deleted-35bc7caa-5b98-48e6-92f2-3dbb6c559faf {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1122.416683] env[68569]: INFO nova.compute.manager [req-1f7327f6-c897-4a94-8c14-08ac558a1b57 req-dbf778ec-e6e9-4b62-97a8-eaa65d050027 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Neutron deleted interface 35bc7caa-5b98-48e6-92f2-3dbb6c559faf; detaching it from the instance and deleting it from the info cache [ 1122.416861] env[68569]: DEBUG nova.network.neutron [req-1f7327f6-c897-4a94-8c14-08ac558a1b57 req-dbf778ec-e6e9-4b62-97a8-eaa65d050027 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.552016] env[68569]: INFO nova.compute.manager [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Detaching volume e7b28a43-bb29-4633-ac1f-9b8194229203 [ 1122.589287] env[68569]: INFO nova.virt.block_device [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Attempting to driver detach volume e7b28a43-bb29-4633-ac1f-9b8194229203 from mountpoint /dev/sdb [ 1122.589549] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1122.589764] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633712', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'name': 'volume-e7b28a43-bb29-4633-ac1f-9b8194229203', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '24bcffcc-6da1-4ae5-b802-88e9364eaf0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'serial': 'e7b28a43-bb29-4633-ac1f-9b8194229203'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1122.590690] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e814b7-076b-4b52-86d0-d2ba53eac35f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.615236] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30801395-de8d-48ac-88ed-c7f855ee4042 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.624039] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052d97df-fc68-4fcd-8037-93a50b821325 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.647044] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae34201-cb6a-4680-9a36-620ceb637922 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.663819] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] The volume has not been displaced from its original location: [datastore2] volume-e7b28a43-bb29-4633-ac1f-9b8194229203/volume-e7b28a43-bb29-4633-ac1f-9b8194229203.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1122.669362] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1122.669705] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e084fec-2aeb-4452-a5a6-d1134ae08cdb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.688633] env[68569]: DEBUG oslo_vmware.api [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1122.688633] env[68569]: value = "task-3167747" [ 1122.688633] env[68569]: _type = "Task" [ 1122.688633] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.697176] env[68569]: DEBUG oslo_vmware.api [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167747, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.734106] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.734106] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.736147] env[68569]: INFO nova.compute.claims [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1122.893652] env[68569]: DEBUG nova.network.neutron [-] [instance: 24a99398-105c-403f-8921-939beb40d447] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.919717] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-747d1919-cd3d-40a4-bf45-3e9759359a91 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.929626] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4b9162-e2a8-4f29-9ce9-ba39ead944aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.960459] env[68569]: 
DEBUG nova.compute.manager [req-1f7327f6-c897-4a94-8c14-08ac558a1b57 req-dbf778ec-e6e9-4b62-97a8-eaa65d050027 service nova] [instance: 24a99398-105c-403f-8921-939beb40d447] Detach interface failed, port_id=35bc7caa-5b98-48e6-92f2-3dbb6c559faf, reason: Instance 24a99398-105c-403f-8921-939beb40d447 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1123.199015] env[68569]: DEBUG oslo_vmware.api [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167747, 'name': ReconfigVM_Task, 'duration_secs': 0.253803} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.199271] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1123.203942] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd05564-951a-43bd-b638-d4abe971dcf5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.220613] env[68569]: DEBUG oslo_vmware.api [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1123.220613] env[68569]: value = "task-3167748" [ 1123.220613] env[68569]: _type = "Task" [ 1123.220613] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.230262] env[68569]: DEBUG oslo_vmware.api [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167748, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.396166] env[68569]: INFO nova.compute.manager [-] [instance: 24a99398-105c-403f-8921-939beb40d447] Took 1.22 seconds to deallocate network for instance. [ 1123.591915] env[68569]: DEBUG nova.compute.manager [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1123.730363] env[68569]: DEBUG oslo_vmware.api [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167748, 'name': ReconfigVM_Task, 'duration_secs': 0.146984} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.730738] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633712', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'name': 'volume-e7b28a43-bb29-4633-ac1f-9b8194229203', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '24bcffcc-6da1-4ae5-b802-88e9364eaf0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7b28a43-bb29-4633-ac1f-9b8194229203', 'serial': 'e7b28a43-bb29-4633-ac1f-9b8194229203'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1123.889422] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0b8adc-d957-4b81-b02d-90bc61897654 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.898237] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e085f84a-b0ee-425c-87e4-acfa23edc801 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.902197] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.929681] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828de2a1-2678-4020-8750-18ad93154fd0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.938793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05590c7c-809a-4bbf-8755-ceb31c391dc6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.953447] env[68569]: DEBUG nova.compute.provider_tree [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.107295] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.274428] env[68569]: DEBUG nova.objects.instance [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'flavor' on Instance uuid 24bcffcc-6da1-4ae5-b802-88e9364eaf0e {{(pid=68569) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.456598] env[68569]: DEBUG nova.scheduler.client.report [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.496821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.497138] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.497378] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.497584] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.497791] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.500041] env[68569]: INFO nova.compute.manager [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Terminating instance [ 1124.597684] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task 
ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.961418] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.227s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.961946] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1124.964495] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.062s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.964682] env[68569]: DEBUG nova.objects.instance [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lazy-loading 'resources' on Instance uuid 24a99398-105c-403f-8921-939beb40d447 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.003985] env[68569]: DEBUG nova.compute.manager [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1125.004239] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1125.005348] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbaf608-3db3-4c44-a0dc-d5b3103ca4b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.014605] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.014830] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cb0fd12-4d9b-43ae-9b36-217a427290e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.021666] env[68569]: DEBUG oslo_vmware.api [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1125.021666] env[68569]: value = "task-3167749" [ 1125.021666] env[68569]: _type = "Task" [ 1125.021666] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.030826] env[68569]: DEBUG oslo_vmware.api [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167749, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.282933] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6f8c413a-5f50-4a65-ab79-095f56d90f79 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.467401] env[68569]: DEBUG nova.compute.utils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1125.472024] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1125.472024] env[68569]: DEBUG nova.network.neutron [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1125.513973] env[68569]: DEBUG nova.policy [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16bef4b0a6d4a5e937e4f3c4a3329b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e07f7ab9ab41feb4d71e1d128d093d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1125.533996] env[68569]: DEBUG oslo_vmware.api [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167749, 'name': PowerOffVM_Task, 'duration_secs': 0.247063} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.534246] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.534418] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1125.534665] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bff9461-359a-4563-b219-b46d1ed60094 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.598049] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.598386] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.598777] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Deleting the datastore file [datastore2] 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 
{{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.599168] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db86ec05-d0b7-42ee-9141-dfc76f4d8f11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.607779] env[68569]: DEBUG oslo_vmware.api [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1125.607779] env[68569]: value = "task-3167751" [ 1125.607779] env[68569]: _type = "Task" [ 1125.607779] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.617437] env[68569]: DEBUG oslo_vmware.api [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.631177] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9f256c-f789-480a-aff6-b3cfa315c54d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.638854] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0c3675-2163-4f4a-8b2f-2a87eb8944df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.670417] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1368b59-50d6-4ad8-bd23-ba8f88817cb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.678017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a301726-4ddb-425e-83d2-39b8376d2e9a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.693216] env[68569]: DEBUG nova.compute.provider_tree [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.757161] env[68569]: DEBUG nova.network.neutron [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Successfully created port: 38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1125.972113] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1126.117605] env[68569]: DEBUG oslo_vmware.api [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151972} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.117842] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1126.118046] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1126.118304] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1126.118483] env[68569]: INFO nova.compute.manager [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1126.118724] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.118905] env[68569]: DEBUG nova.compute.manager [-] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1126.118997] env[68569]: DEBUG nova.network.neutron [-] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1126.195926] env[68569]: DEBUG nova.scheduler.client.report [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.312326] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.312673] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.312948] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.315847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.315847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.319358] env[68569]: INFO nova.compute.manager [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Terminating instance [ 1126.600968] env[68569]: DEBUG nova.compute.manager [req-07e51393-51d2-4bad-8fb1-ba7b15dbbd32 req-03fb2116-a7c3-4728-9db7-555e54d22e80 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Received event network-vif-deleted-feea2419-1c39-4028-811f-c72311dae7a4 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1126.601221] env[68569]: INFO nova.compute.manager [req-07e51393-51d2-4bad-8fb1-ba7b15dbbd32 req-03fb2116-a7c3-4728-9db7-555e54d22e80 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Neutron deleted interface feea2419-1c39-4028-811f-c72311dae7a4; detaching it from the instance and deleting it from the info cache [ 1126.601411] env[68569]: DEBUG nova.network.neutron [req-07e51393-51d2-4bad-8fb1-ba7b15dbbd32 req-03fb2116-a7c3-4728-9db7-555e54d22e80 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.700534] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.702882] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.596s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.726909] env[68569]: INFO nova.scheduler.client.report [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Deleted allocations for instance 24a99398-105c-403f-8921-939beb40d447 [ 1126.824242] env[68569]: DEBUG nova.compute.manager [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1126.824407] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1126.825554] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51dc5c1a-76e5-4f37-8a6a-e19e4fa2e9ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.835240] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1126.835499] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1809976f-da88-4550-b5ed-e0579b11389c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.843130] env[68569]: DEBUG oslo_vmware.api [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1126.843130] env[68569]: value = "task-3167752" [ 1126.843130] env[68569]: _type = "Task" [ 1126.843130] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.852448] env[68569]: DEBUG oslo_vmware.api [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.983029] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1127.007977] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.008304] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.008480] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.008667] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.008813] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.008957] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.009202] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.009377] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.009548] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.009708] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.009875] env[68569]: DEBUG nova.virt.hardware [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.010833] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40235f1c-d055-41b3-aee8-61e83438149b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.019682] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a25260d-7a39-4e8b-bca1-3f05674c9d20 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.070941] env[68569]: DEBUG nova.network.neutron [-] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.105527] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01ac2ea4-2950-4922-801e-558e6e429ef7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.116760] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beec1a06-64cf-4c74-ae3a-e7fc13c09735 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.148763] env[68569]: DEBUG nova.compute.manager [req-07e51393-51d2-4bad-8fb1-ba7b15dbbd32 req-03fb2116-a7c3-4728-9db7-555e54d22e80 service nova] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Detach interface failed, port_id=feea2419-1c39-4028-811f-c72311dae7a4, reason: Instance 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1127.208714] env[68569]: INFO nova.compute.claims [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.235688] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5ad5481-3055-4834-97b8-307759fdd1e2 tempest-ImagesTestJSON-885804793 tempest-ImagesTestJSON-885804793-project-member] Lock "24a99398-105c-403f-8921-939beb40d447" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.687s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.305142] env[68569]: DEBUG nova.network.neutron [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Successfully updated port: 38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1127.356620] env[68569]: DEBUG oslo_vmware.api [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167752, 'name': PowerOffVM_Task, 'duration_secs': 0.232127} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.356897] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1127.357082] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1127.357345] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb5dd78b-597e-471c-b3be-2afc94405ccd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.421790] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1127.422066] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1127.422297] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 
tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleting the datastore file [datastore1] 24bcffcc-6da1-4ae5-b802-88e9364eaf0e {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1127.422596] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4aaf9922-7c4f-4141-88bf-43c3e47e892a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.429572] env[68569]: DEBUG oslo_vmware.api [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1127.429572] env[68569]: value = "task-3167754" [ 1127.429572] env[68569]: _type = "Task" [ 1127.429572] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.438312] env[68569]: DEBUG oslo_vmware.api [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167754, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.574224] env[68569]: INFO nova.compute.manager [-] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Took 1.45 seconds to deallocate network for instance. [ 1127.715251] env[68569]: INFO nova.compute.resource_tracker [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating resource usage from migration fe7aea34-051f-4aac-8ebc-041c0564955e [ 1127.812640] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "refresh_cache-fca05228-2a17-4a7e-94a0-449ba74a8933" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.812800] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "refresh_cache-fca05228-2a17-4a7e-94a0-449ba74a8933" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.812948] env[68569]: DEBUG nova.network.neutron [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.863252] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b92b17-c65f-4e07-a882-c5e13ea3abde {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.871398] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808a68b8-833c-4211-95c1-5d2906fd93a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.903711] env[68569]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f389fd-f538-4655-9211-c7fdc702eec0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.912714] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c309fa6-6181-478c-8064-6b2855c1ed10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.927398] env[68569]: DEBUG nova.compute.provider_tree [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1127.939380] env[68569]: DEBUG oslo_vmware.api [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158927} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.939723] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1127.939879] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1127.940087] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1127.940268] env[68569]: INFO nova.compute.manager [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1127.940507] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.940687] env[68569]: DEBUG nova.compute.manager [-] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1127.940775] env[68569]: DEBUG nova.network.neutron [-] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1128.081081] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.094328] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.361013] env[68569]: DEBUG nova.network.neutron [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1128.451378] env[68569]: ERROR nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [req-41a7c93d-4b54-40bf-a7f2-46e3ca5e65fc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-41a7c93d-4b54-40bf-a7f2-46e3ca5e65fc"}]} [ 1128.467215] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1128.482078] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1128.482493] env[68569]: DEBUG nova.compute.provider_tree [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1128.493327] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1128.516325] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1128.557869] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1128.558235] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.585723] env[68569]: DEBUG nova.network.neutron [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Updating instance_info_cache with network_info: [{"id": "38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e", "address": "fa:16:3e:42:c4:c3", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38c532c8-8e", "ovs_interfaceid": "38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.597011] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.597229] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.635260] env[68569]: DEBUG nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Received event network-vif-plugged-38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1128.635473] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] Acquiring lock "fca05228-2a17-4a7e-94a0-449ba74a8933-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.635682] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] Lock 
"fca05228-2a17-4a7e-94a0-449ba74a8933-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.635845] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.636053] env[68569]: DEBUG nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] No waiting events found dispatching network-vif-plugged-38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1128.636254] env[68569]: WARNING nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Received unexpected event network-vif-plugged-38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e for instance with vm_state building and task_state spawning. [ 1128.636447] env[68569]: DEBUG nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Received event network-changed-38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1128.636561] env[68569]: DEBUG nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Refreshing instance network info cache due to event network-changed-38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1128.636726] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] Acquiring lock "refresh_cache-fca05228-2a17-4a7e-94a0-449ba74a8933" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.688182] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3f6653-1a5a-40ba-bd7a-0735fde3b034 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.696785] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501e6fd0-87af-4d16-bc0d-0894e290daff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.728882] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478703f7-08af-43b5-9c93-90403fa6b679 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.737134] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96409e25-f2e2-45a8-b0f4-e934c0d38a49 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.751565] env[68569]: DEBUG nova.compute.provider_tree [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1129.063729] env[68569]: INFO nova.compute.manager [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Detaching volume c8148d10-8bad-49bc-93bf-559f83378a11 [ 1129.088232] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "refresh_cache-fca05228-2a17-4a7e-94a0-449ba74a8933" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.089033] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Instance network_info: |[{"id": "38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e", "address": "fa:16:3e:42:c4:c3", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38c532c8-8e", "ovs_interfaceid": "38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1129.089033] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] Acquired lock "refresh_cache-fca05228-2a17-4a7e-94a0-449ba74a8933" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.089353] env[68569]: DEBUG nova.network.neutron [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Refreshing network info cache for port 38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1129.090524] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:c4:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.098257] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1129.101230] env[68569]: DEBUG nova.network.neutron [-] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.102207] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1129.103627] env[68569]: INFO nova.virt.block_device [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Attempting to driver detach volume c8148d10-8bad-49bc-93bf-559f83378a11 from mountpoint /dev/sdb [ 1129.103779] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1129.103969] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1129.104396] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-389a7afe-f12c-45f5-b2c7-fee4f8575e5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.121530] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e96443-0298-49b1-89c9-c361d022e059 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.146517] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150cf09f-26eb-450d-add5-4a4d79882bc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.149184] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.149184] env[68569]: value = "task-3167755" [ 1129.149184] env[68569]: _type = "Task" [ 1129.149184] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.157826] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182ce544-600e-4943-821d-e9e290f30bb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.163698] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167755, 'name': CreateVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.182688] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e0fb05-7142-4241-9e5a-853b7f9dedf5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.201671] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] The volume has not been displaced from its original location: [datastore1] volume-c8148d10-8bad-49bc-93bf-559f83378a11/volume-c8148d10-8bad-49bc-93bf-559f83378a11.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1129.207241] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1129.207690] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-156477f4-f0a6-428b-85ab-54238ce66b5b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.229127] env[68569]: DEBUG oslo_vmware.api [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1129.229127] env[68569]: value = "task-3167756" [ 1129.229127] env[68569]: _type = "Task" [ 1129.229127] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.237927] env[68569]: DEBUG oslo_vmware.api [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167756, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.296392] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1129.296392] env[68569]: DEBUG nova.compute.provider_tree [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 159 to 160 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1129.296392] env[68569]: DEBUG nova.compute.provider_tree [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1129.377480] env[68569]: DEBUG nova.network.neutron [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Updated VIF entry in instance network info cache for port 38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1129.378133] env[68569]: DEBUG nova.network.neutron [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Updating instance_info_cache with network_info: [{"id": "38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e", "address": "fa:16:3e:42:c4:c3", "network": {"id": "dec9abd8-5c1a-4b32-8360-bee4dd634210", "bridge": "br-int", "label": "tempest-ServersTestJSON-902658744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67e07f7ab9ab41feb4d71e1d128d093d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38c532c8-8e", "ovs_interfaceid": "38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.601088] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.601088] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.601088] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.601088] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1129.606047] env[68569]: INFO nova.compute.manager [-] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Took 1.66 seconds to deallocate network for instance. [ 1129.662183] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167755, 'name': CreateVM_Task, 'duration_secs': 0.35523} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.662321] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1129.663096] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.663234] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.663498] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1129.663745] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33ac2784-e2ac-44f3-9d44-40e5c171f783 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.669184] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1129.669184] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5299c0d0-9516-4e78-f681-35221633cef6" [ 1129.669184] env[68569]: _type = "Task" [ 1129.669184] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.677531] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5299c0d0-9516-4e78-f681-35221633cef6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.738743] env[68569]: DEBUG oslo_vmware.api [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167756, 'name': ReconfigVM_Task, 'duration_secs': 0.249002} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.738970] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1129.743488] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05627fd2-7666-429e-82ea-032079dc2a3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.760480] env[68569]: DEBUG oslo_vmware.api [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1129.760480] env[68569]: value = "task-3167757" [ 1129.760480] env[68569]: _type = "Task" [ 1129.760480] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.768350] env[68569]: DEBUG oslo_vmware.api [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167757, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.800813] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.098s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.801035] env[68569]: INFO nova.compute.manager [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Migrating [ 1129.807288] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.726s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.807509] env[68569]: DEBUG nova.objects.instance [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'resources' on Instance uuid 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.881549] env[68569]: DEBUG oslo_concurrency.lockutils [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] Releasing lock "refresh_cache-fca05228-2a17-4a7e-94a0-449ba74a8933" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.881751] env[68569]: DEBUG nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] 
[instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Received event network-vif-deleted-9808ed24-af29-4716-93e9-049a46b044c6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1129.881932] env[68569]: INFO nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Neutron deleted interface 9808ed24-af29-4716-93e9-049a46b044c6; detaching it from the instance and deleting it from the info cache [ 1129.882138] env[68569]: DEBUG nova.network.neutron [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.112216] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.181045] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5299c0d0-9516-4e78-f681-35221633cef6, 'name': SearchDatastore_Task, 'duration_secs': 0.011516} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.181424] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.181705] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1130.181990] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.182188] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.182580] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1130.182713] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-478cf04b-f02f-4d86-b221-4f4efcb1a829 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.192467] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1130.192696] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1130.193438] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2258d40e-b40d-4477-ba02-c4aad1838ccf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.199055] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1130.199055] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bea908-e558-0881-335a-f413102800a7" [ 1130.199055] env[68569]: _type = "Task" [ 1130.199055] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.207181] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bea908-e558-0881-335a-f413102800a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.274288] env[68569]: DEBUG oslo_vmware.api [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167757, 'name': ReconfigVM_Task, 'duration_secs': 0.152421} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.274288] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633698', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'name': 'volume-c8148d10-8bad-49bc-93bf-559f83378a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8148d10-8bad-49bc-93bf-559f83378a11', 'serial': 'c8148d10-8bad-49bc-93bf-559f83378a11'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1130.319678] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.319862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.320046] env[68569]: DEBUG nova.network.neutron [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1130.385096] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d80af709-1c79-4684-ae16-0b45a4886e76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.395867] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a2d618-71ad-415d-9127-a32132a3c460 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.432821] env[68569]: DEBUG nova.compute.manager [req-6b273817-56e0-4c46-99d7-ade22249295e req-51bc788b-1296-41dc-b3af-ea3ce2d684ef service nova] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Detach interface failed, port_id=9808ed24-af29-4716-93e9-049a46b044c6, reason: Instance 24bcffcc-6da1-4ae5-b802-88e9364eaf0e could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1130.492629] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd75fb23-9216-448b-951e-157d6bd76231 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.501692] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67daf84-a005-48be-bbae-aff29faec09d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.536853] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af74dfe-ab1b-4251-9202-ef26fd290a4c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.545724] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2323db-b33d-4c18-98a0-c1adea0961ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.560074] env[68569]: DEBUG nova.compute.provider_tree [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.597169] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.597430] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.712063] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bea908-e558-0881-335a-f413102800a7, 'name': SearchDatastore_Task, 'duration_secs': 0.024101} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.712660] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb280373-20b8-4e21-b906-8bfd31e84cc4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.718326] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1130.718326] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f0f04c-b455-d886-97b6-fcec240a41c0" [ 1130.718326] env[68569]: _type = "Task" [ 1130.718326] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.726541] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f0f04c-b455-d886-97b6-fcec240a41c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.820785] env[68569]: DEBUG nova.objects.instance [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'flavor' on Instance uuid a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.063959] env[68569]: DEBUG nova.scheduler.client.report [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.100212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.114364] env[68569]: DEBUG nova.network.neutron [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [{"id": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "address": "fa:16:3e:58:9a:77", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4711426c-2c", "ovs_interfaceid": "4711426c-2c79-4cc9-8144-86c583dd1fc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.229620] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f0f04c-b455-d886-97b6-fcec240a41c0, 'name': SearchDatastore_Task, 'duration_secs': 0.017528} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.229903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.230164] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fca05228-2a17-4a7e-94a0-449ba74a8933/fca05228-2a17-4a7e-94a0-449ba74a8933.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1131.230422] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4623b9a2-58aa-4290-8cfe-142e5bc69534 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.238397] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1131.238397] env[68569]: value = "task-3167758" [ 1131.238397] env[68569]: _type = "Task" [ 1131.238397] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.246697] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167758, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.569339] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.572625] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.460s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.572625] env[68569]: DEBUG nova.objects.instance [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'resources' on Instance uuid 24bcffcc-6da1-4ae5-b802-88e9364eaf0e {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.603533] env[68569]: INFO nova.scheduler.client.report [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Deleted allocations for instance 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948 [ 1131.616733] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-a9e87dfc-6e00-4e55-8a8f-bc3174b991da" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.749638] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485098} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.749904] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fca05228-2a17-4a7e-94a0-449ba74a8933/fca05228-2a17-4a7e-94a0-449ba74a8933.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1131.750139] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1131.750431] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ec25ea9-37f2-4ad4-a8cd-81fbc72f7228 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.758583] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1131.758583] env[68569]: value = "task-3167759" [ 1131.758583] env[68569]: _type = "Task" [ 1131.758583] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.767013] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167759, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.830254] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d1507bd0-085c-4ccd-9a78-23344fca3bf6 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.272s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.959567] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.959841] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.960054] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.960238] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.960455] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.963318] env[68569]: INFO nova.compute.manager [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Terminating instance [ 1132.117113] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b0099195-4c8c-4167-9dd7-d348848ffd0e tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.620s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.232780] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05eae1d-db16-41b7-aca9-4abc1abac9ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.241014] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca5de5e-1be1-4e82-b516-8666dac365b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.275526] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72036bf-3314-4ae8-8c70-dea4aefd42bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.285266] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167759, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072312} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.285693] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1132.286924] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0aa9ad1-4a71-45c1-8b49-7482118e3cbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.291036] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd4a84c-3c16-4d1f-a904-dbcb75214691 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.303904] env[68569]: DEBUG nova.compute.provider_tree [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.324337] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] fca05228-2a17-4a7e-94a0-449ba74a8933/fca05228-2a17-4a7e-94a0-449ba74a8933.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1132.325538] env[68569]: DEBUG nova.scheduler.client.report [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.328741] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2d3b84a-c1b5-4d75-9621-8881604dcca0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.353020] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1132.353020] env[68569]: value = "task-3167760" [ 1132.353020] env[68569]: _type = "Task" [ 1132.353020] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.361103] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167760, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.471130] env[68569]: DEBUG nova.compute.manager [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1132.471130] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1132.471130] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0472f27-8bd3-489f-a846-ef9aa884c632 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.484017] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.484017] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99886707-7e90-474a-b319-4bc4796110e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.491566] env[68569]: DEBUG oslo_vmware.api [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1132.491566] env[68569]: value = "task-3167761" [ 1132.491566] env[68569]: _type = "Task" [ 1132.491566] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.500936] env[68569]: DEBUG oslo_vmware.api [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167761, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.847330] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.274s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.848899] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.749s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.849324] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.849545] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1132.850545] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c25916-46e3-4a31-94ce-89f7d8cb5eac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.862579] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167760, 'name': ReconfigVM_Task, 'duration_secs': 0.294991} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.865643] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Reconfigured VM instance instance-0000006b to attach disk [datastore2] fca05228-2a17-4a7e-94a0-449ba74a8933/fca05228-2a17-4a7e-94a0-449ba74a8933.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1132.866361] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47401335-d5e1-44d8-9f66-e6cd6c604378 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.869440] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bcd166-f0fc-4e8d-8c9b-39a8f72a387e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.886626] env[68569]: INFO nova.scheduler.client.report [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted allocations for instance 24bcffcc-6da1-4ae5-b802-88e9364eaf0e [ 1132.893321] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a0497a-3a1a-4f96-9a7f-700e68b1049c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.896626] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1132.896626] env[68569]: value = "task-3167762" [ 1132.896626] env[68569]: _type = "Task" [ 1132.896626] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.907989] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffafefd-cab7-4e13-9cbd-77113f9f55a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.915723] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167762, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.949845] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179159MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1132.950058] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.950415] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.003397] env[68569]: DEBUG oslo_vmware.api [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167761, 'name': PowerOffVM_Task, 'duration_secs': 0.207003} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.003397] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1133.003603] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1133.003803] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8be07b8-c079-4d70-b783-77927dd14dbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.411021] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167762, 'name': Rename_Task, 'duration_secs': 0.169119} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.411296] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1133.411525] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8b25c27-63d6-4a00-abf7-f0d6911d5c10 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.414490] env[68569]: DEBUG oslo_concurrency.lockutils [None req-20d58693-0638-4ca7-8f59-b62d8dfbd40b tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "24bcffcc-6da1-4ae5-b802-88e9364eaf0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.101s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.421030] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1133.421030] env[68569]: value = "task-3167764" [ 1133.421030] env[68569]: _type = "Task" [ 1133.421030] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.431543] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167764, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.934175] env[68569]: DEBUG oslo_vmware.api [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167764, 'name': PowerOnVM_Task, 'duration_secs': 0.510418} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.935188] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1133.935926] env[68569]: INFO nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Took 6.95 seconds to spawn the instance on the hypervisor. 
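The "Waiting for the task: (returnval){ ... } to complete" blocks, the "progress is N%" polls and the "completed successfully" entries above are all emitted by oslo.vmware's task-wait loop (wait_for_task / _poll_task in oslo_vmware/api.py). A rough Python sketch of that polling pattern follows; poll_task, the state strings and POLL_INTERVAL are illustrative stand-ins, not the library's actual internals.

    import time

    POLL_INTERVAL = 0.5  # comparable to the sub-second poll cadence visible in the timestamps above


    def wait_for_task(task_ref, poll_task):
        """Poll an asynchronous vSphere-style task until it finishes.

        task_ref names the task (e.g. "task-3167764" in the log above);
        poll_task is a caller-supplied callable returning (state, progress,
        error). Both are illustrative placeholders for the real vSphere calls.
        """
        while True:
            state, progress, error = poll_task(task_ref)
            if state == 'success':
                # Corresponds to the "... completed successfully." log entries.
                return
            if state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_ref, error))
            # Corresponds to the "progress is N%" entries logged while waiting.
            time.sleep(POLL_INTERVAL)

In this section the same wait pattern brackets CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task during the spawn of instance fca05228-2a17-4a7e-94a0-449ba74a8933, and PowerOffVM_Task / DeleteDatastoreFile_Task during the teardown of a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3.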
[ 1133.938018] env[68569]: DEBUG nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1133.938018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd57162-cc4d-4c4b-b887-ad30699c2835 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.960032] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Applying migration context for instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da as it has an incoming, in-progress migration fe7aea34-051f-4aac-8ebc-041c0564955e. Migration status is migrating {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1133.961356] env[68569]: INFO nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating resource usage from migration fe7aea34-051f-4aac-8ebc-041c0564955e [ 1134.025888] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.027230] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.093971] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 61aa0997-ffa6-4551-bdaa-132026e240f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094103] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094227] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance f5f8b054-7ee4-40f5-84de-1cee02949cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094346] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094464] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance ee188712-b0e0-44ee-80b4-be72da32299f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094577] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094690] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fca05228-2a17-4a7e-94a0-449ba74a8933 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.094865] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Migration fe7aea34-051f-4aac-8ebc-041c0564955e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1134.095073] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.139627] env[68569]: ERROR nova.compute.manager [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Traceback (most recent call last): [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] yield [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] disk_info = self.driver.migrate_disk_and_power_off( [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1134.139627] env[68569]: ERROR nova.compute.manager [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] [ 1134.217093] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1134.217333] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1134.217511] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore1] a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1134.217783] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72dfc797-8983-44f1-876d-fb493e10f013 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.226589] env[68569]: DEBUG oslo_vmware.api [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1134.226589] env[68569]: value = "task-3167765" [ 1134.226589] env[68569]: _type = "Task" [ 1134.226589] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.238081] env[68569]: DEBUG oslo_vmware.api [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.460246] env[68569]: INFO nova.compute.manager [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Took 11.74 seconds to build instance. [ 1134.529314] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1134.598132] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fdcdd4b5-82bd-43c9-8865-807f86789a99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1134.598413] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1134.598822] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1134.657208] env[68569]: INFO nova.compute.manager [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration fe7aea34-051f-4aac-8ebc-041c0564955e for instance [ 1134.674279] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.674867] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.674867] env[68569]: DEBUG nova.objects.instance [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'flavor' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1134.683261] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 162}}, 'project_id': 'aa0ab47201c64b0d87480d4ff90014f5', 'user_id': '73b1c309d1494888945f033a8c5140a5', 'consumer_generation': 1} on consumer a9e87dfc-6e00-4e55-8a8f-bc3174b991da {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1134.737337] env[68569]: DEBUG oslo_vmware.api [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163359} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.738130] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.738130] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.738130] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.738340] env[68569]: INFO nova.compute.manager [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1134.738429] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1134.738625] env[68569]: DEBUG nova.compute.manager [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1134.738714] env[68569]: DEBUG nova.network.neutron [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1134.753030] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63831308-1147-4d48-b228-ef18fb55472c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.763790] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d5519d-930d-42c1-a1f9-4d80f78cf9ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.796612] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294beb4a-cac1-4741-bf56-c180eae81860 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.804776] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c6518c-0e3b-443d-a1c9-6523696eb865 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.820208] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.962840] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e9d416d-a3a8-452b-bdb2-4a2a8aa04ba2 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.251s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.041626] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.041853] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.057309] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 
tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.178803] env[68569]: DEBUG nova.objects.instance [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'pci_requests' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.324888] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.527217] env[68569]: DEBUG nova.compute.manager [req-9fc9a696-57d1-41e6-b341-f5826db08f6e req-f462d389-5b1a-4d81-87cd-3da004cee496 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Received event network-vif-deleted-f0211609-4c97-40c7-ba53-6f3802350533 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1135.527447] env[68569]: INFO nova.compute.manager [req-9fc9a696-57d1-41e6-b341-f5826db08f6e req-f462d389-5b1a-4d81-87cd-3da004cee496 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Neutron deleted interface f0211609-4c97-40c7-ba53-6f3802350533; detaching it from the instance and deleting it from the info cache [ 1135.527697] env[68569]: DEBUG nova.network.neutron [req-9fc9a696-57d1-41e6-b341-f5826db08f6e req-f462d389-5b1a-4d81-87cd-3da004cee496 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.543645] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.681779] env[68569]: DEBUG nova.objects.base [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1135.682037] env[68569]: DEBUG nova.network.neutron [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1135.756642] env[68569]: DEBUG oslo_concurrency.lockutils [None req-04ea422e-ed69-48e8-8ace-3ce292d8a757 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.082s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.813186] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "fca05228-2a17-4a7e-94a0-449ba74a8933" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.813186] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.813186] env[68569]: DEBUG nova.compute.manager [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1135.814055] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556b6def-80f6-4005-abaf-832f1074bcbc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.821623] env[68569]: DEBUG nova.compute.manager [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1135.822353] env[68569]: DEBUG nova.objects.instance [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'flavor' on Instance uuid fca05228-2a17-4a7e-94a0-449ba74a8933 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.829710] env[68569]: DEBUG 
nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1135.829936] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.880s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.830231] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.773s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.831902] env[68569]: INFO nova.compute.claims [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.834660] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.834951] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Cleaning up deleted instances {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11841}} [ 1135.890345] env[68569]: DEBUG nova.network.neutron [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.032729] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef055b4d-6b0e-4bb9-bed6-8ec65ae0b35d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.043343] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa7ff11-06c2-4616-b0e7-9372483b5b18 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.067947] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.076696] env[68569]: DEBUG nova.compute.manager [req-9fc9a696-57d1-41e6-b341-f5826db08f6e req-f462d389-5b1a-4d81-87cd-3da004cee496 service nova] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Detach interface failed, port_id=f0211609-4c97-40c7-ba53-6f3802350533, reason: Instance a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1136.255558] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.353416] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] There are 49 instances to clean {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11850}} [ 1136.353683] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 415b9040-15c3-472f-b427-c39e2c456764] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1136.392765] env[68569]: INFO nova.compute.manager [-] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Took 1.65 seconds to deallocate network for instance. [ 1136.482499] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.482736] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.521473] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa59dda-dad1-4709-a755-94bde53a0646 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.530948] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e02b2f-69ad-4d76-956e-2cf9405b8b25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.564369] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2a3dc2-2b9b-4d29-bcb3-0f9dfd633126 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.572824] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca87959-1886-49b6-bfb1-371764d42654 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.587270] env[68569]: DEBUG nova.compute.provider_tree [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.829155] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.829492] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-595368c5-bb0e-4c7f-9883-8451ed336c99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.837238] env[68569]: DEBUG oslo_vmware.api [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1136.837238] env[68569]: value = "task-3167766" [ 1136.837238] env[68569]: _type = "Task" [ 1136.837238] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.846190] env[68569]: DEBUG oslo_vmware.api [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.859806] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 24a99398-105c-403f-8921-939beb40d447] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1136.902644] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.985880] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1137.090778] env[68569]: DEBUG nova.scheduler.client.report [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.204543] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.205030] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.205433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.206137] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.206361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.209121] env[68569]: INFO nova.compute.manager [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Terminating instance [ 1137.347615] env[68569]: DEBUG oslo_vmware.api [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 
tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167766, 'name': PowerOffVM_Task, 'duration_secs': 0.241199} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.347882] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.348115] env[68569]: DEBUG nova.compute.manager [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.348883] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ed6ba9-ab05-4485-bca9-4bf08e258bd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.362512] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fd34691f-ebe5-4b40-994c-6278e09fc9eb] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1137.508956] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.596460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.766s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.597014] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.599750] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.532s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.601267] env[68569]: INFO nova.compute.claims [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.714586] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.714832] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.715181] env[68569]: DEBUG nova.objects.instance [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'flavor' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.716889] env[68569]: DEBUG nova.compute.manager [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1137.717400] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.718189] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0827f297-d1da-4360-9d6f-a453f65d6085 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.726483] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.726726] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7e584a5-af48-446a-922f-16f230d4a249 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.735045] env[68569]: DEBUG oslo_vmware.api [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1137.735045] env[68569]: value = "task-3167767" [ 1137.735045] env[68569]: _type = "Task" [ 1137.735045] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.743396] env[68569]: DEBUG oslo_vmware.api [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.860427] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d6c0f455-db00-4dc3-89cb-53052c41fab9 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.865060] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7c71799e-21d9-41f8-b35c-7117354a0287] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1138.107832] env[68569]: DEBUG nova.compute.utils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1138.110054] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1138.110054] env[68569]: DEBUG nova.network.neutron [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.172792] env[68569]: DEBUG nova.policy [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b8671c22fa042d28350b219ac52d775', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '335f83fe0b8e42aa80e8f0691b609649', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1138.244525] env[68569]: DEBUG oslo_vmware.api [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167767, 'name': PowerOffVM_Task, 'duration_secs': 0.280047} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.244770] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.244932] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1138.245518] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f71fd2c-4790-4d80-a89c-ac9bd9f4d6db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.317210] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.317466] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.317643] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleting the datastore file 
[datastore2] a9e87dfc-6e00-4e55-8a8f-bc3174b991da {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.317917] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-682c2bb9-047b-4d41-a854-78b9ace719bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.325074] env[68569]: DEBUG oslo_vmware.api [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1138.325074] env[68569]: value = "task-3167769" [ 1138.325074] env[68569]: _type = "Task" [ 1138.325074] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.333294] env[68569]: DEBUG oslo_vmware.api [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167769, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.360332] env[68569]: DEBUG nova.objects.instance [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'pci_requests' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.368097] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 5eb7ff96-3a9e-470d-9cbe-644446302ecf] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1138.571042] env[68569]: DEBUG nova.network.neutron [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Successfully created port: 234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.613443] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.764224] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d0ced5-2f88-46a5-b341-f0e5ae1a669b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.772619] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702db0cc-d152-4a82-b7dc-1de2ca5c6769 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.803108] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d51b28-0f18-4a86-a074-0b443024305a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.811597] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddf4a9a-a10c-4107-8099-633e2b366bf8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.825811] env[68569]: DEBUG nova.compute.provider_tree [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.836504] env[68569]: DEBUG oslo_vmware.api [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148333} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.836750] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.836932] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.837120] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.837290] env[68569]: INFO nova.compute.manager [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Took 1.12 seconds to destroy the instance on the hypervisor. 
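The destroy sequence recorded above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task) repeats the same pattern each time: a vCenter task object is created and then polled until it reports success or failure, which is what the wait_for_task and _poll_task entries show. Below is a minimal, generic sketch of such a polling loop; get_task_info is a hypothetical callable standing in for reading the vCenter Task managed object, not the oslo.vmware API.

```python
import time


class TaskFailed(RuntimeError):
    pass


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the wait_for_task/_poll_task
    entries in the log. get_task_info is assumed to return a dict such as
    {'state': 'running', 'progress': 0}."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')  # task return value, if any
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'task failed'))
        # Still queued/running: sleep and poll again, as the repeated
        # "progress is 0%" entries do.
        time.sleep(poll_interval)
    raise TaskFailed('timed out waiting for task')
```

In the log, task-3167767 (PowerOffVM_Task) goes from 0% to completed in roughly 0.28 s (duration_secs: 0.280047), and the subsequent DeleteDatastoreFile_Task task-3167769 completes in about 0.15 s.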
[ 1138.837520] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.837697] env[68569]: DEBUG nova.compute.manager [-] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1138.837789] env[68569]: DEBUG nova.network.neutron [-] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.862719] env[68569]: DEBUG nova.objects.base [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1138.862938] env[68569]: DEBUG nova.network.neutron [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.870453] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 24bcffcc-6da1-4ae5-b802-88e9364eaf0e] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1138.899734] env[68569]: DEBUG nova.policy [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1139.073821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "fca05228-2a17-4a7e-94a0-449ba74a8933" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.074125] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.074340] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "fca05228-2a17-4a7e-94a0-449ba74a8933-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.074571] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.074794] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.076894] env[68569]: INFO nova.compute.manager [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Terminating instance [ 1139.152824] env[68569]: DEBUG nova.network.neutron [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Successfully created port: b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.300147] env[68569]: DEBUG nova.compute.manager [req-eb308a37-a4af-4387-81f6-0478b152cd4a req-de03c90e-a91d-4138-afbe-c736acd2d4a3 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Received event network-vif-deleted-4711426c-2c79-4cc9-8144-86c583dd1fc2 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1139.300374] env[68569]: INFO nova.compute.manager [req-eb308a37-a4af-4387-81f6-0478b152cd4a req-de03c90e-a91d-4138-afbe-c736acd2d4a3 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Neutron deleted interface 4711426c-2c79-4cc9-8144-86c583dd1fc2; detaching it from the instance and deleting it from the info cache [ 1139.300572] env[68569]: DEBUG nova.network.neutron [req-eb308a37-a4af-4387-81f6-0478b152cd4a req-de03c90e-a91d-4138-afbe-c736acd2d4a3 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.332275] env[68569]: DEBUG nova.scheduler.client.report [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.372917] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: d7a0631c-902d-4653-b900-2123de5bcb44] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1139.581407] env[68569]: DEBUG nova.compute.manager [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1139.581550] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1139.582444] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b249365d-ca1b-4cd7-97a4-176e03a05615 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.590615] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1139.590872] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55119e90-6143-4084-881c-3005bdaffaa2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.630068] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.658242] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.658529] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.658686] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.659243] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.659243] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.659243] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.659443] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.659504] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.659663] env[68569]: DEBUG 
nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.659817] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.659981] env[68569]: DEBUG nova.virt.hardware [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.660857] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31427812-ab8d-4a08-b2cf-ebdfb09d28e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.664508] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1139.664705] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1139.664880] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore2] fca05228-2a17-4a7e-94a0-449ba74a8933 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1139.665474] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45d11837-ca42-43a1-a986-613f556f8381 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.671015] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f756e348-b7d3-456b-9d3b-50a5c7907f58 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.676136] env[68569]: DEBUG oslo_vmware.api [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1139.676136] env[68569]: value = "task-3167771" [ 1139.676136] env[68569]: _type = "Task" [ 1139.676136] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.692220] env[68569]: DEBUG oslo_vmware.api [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167771, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.773057] env[68569]: DEBUG nova.network.neutron [-] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.803843] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d92bfa1-0247-4a7e-b36e-7b3a852bbbe6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.814237] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19227b1e-e16c-48b9-b4a3-87b38f6e6f30 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.845640] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.846262] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1139.848958] env[68569]: DEBUG nova.compute.manager [req-eb308a37-a4af-4387-81f6-0478b152cd4a req-de03c90e-a91d-4138-afbe-c736acd2d4a3 service nova] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Detach interface failed, port_id=4711426c-2c79-4cc9-8144-86c583dd1fc2, reason: Instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1139.849630] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.594s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.876069] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6e70ddce-9b27-4196-b0e7-c32d3f8d5ae3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1140.018236] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed7475e-23d5-4a87-b52c-f30bdbe67d00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.028024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0effc2e-c5b6-40a2-a03e-8471805b7b8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.057111] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b157d65-3257-405e-97ca-ceb2d6215840 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.064899] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5aaf09e-7496-4487-bb9e-4ae607897a8e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.069271] env[68569]: DEBUG nova.network.neutron [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Successfully updated port: 234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.081785] env[68569]: DEBUG nova.compute.provider_tree [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.185967] env[68569]: DEBUG oslo_vmware.api [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138948} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.186383] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1140.186484] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1140.186701] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1140.186901] env[68569]: INFO nova.compute.manager [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1140.187196] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.187435] env[68569]: DEBUG nova.compute.manager [-] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1140.187558] env[68569]: DEBUG nova.network.neutron [-] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1140.276366] env[68569]: INFO nova.compute.manager [-] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Took 1.44 seconds to deallocate network for instance. [ 1140.355045] env[68569]: DEBUG nova.compute.utils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1140.356644] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1140.356844] env[68569]: DEBUG nova.network.neutron [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1140.379741] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 16112ff1-bda8-4a20-b69c-b847ade376b4] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1140.423703] env[68569]: DEBUG nova.policy [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '575076a4e45f4a9fb3e804856d83094f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '713d88f021794769a64eef3807ade9be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1140.572800] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.572933] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.573154] env[68569]: DEBUG nova.network.neutron [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.585165] env[68569]: DEBUG nova.scheduler.client.report [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1140.601255] env[68569]: DEBUG nova.compute.manager [req-7cbbc43e-64db-408e-a117-94e7950ea2e2 req-3db904e0-128e-4664-bfc9-c9b6d6084043 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] 
Received event network-vif-plugged-b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1140.601368] env[68569]: DEBUG oslo_concurrency.lockutils [req-7cbbc43e-64db-408e-a117-94e7950ea2e2 req-3db904e0-128e-4664-bfc9-c9b6d6084043 service nova] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.601587] env[68569]: DEBUG oslo_concurrency.lockutils [req-7cbbc43e-64db-408e-a117-94e7950ea2e2 req-3db904e0-128e-4664-bfc9-c9b6d6084043 service nova] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.601760] env[68569]: DEBUG oslo_concurrency.lockutils [req-7cbbc43e-64db-408e-a117-94e7950ea2e2 req-3db904e0-128e-4664-bfc9-c9b6d6084043 service nova] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.601922] env[68569]: DEBUG nova.compute.manager [req-7cbbc43e-64db-408e-a117-94e7950ea2e2 req-3db904e0-128e-4664-bfc9-c9b6d6084043 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] No waiting events found dispatching network-vif-plugged-b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.604333] env[68569]: WARNING nova.compute.manager [req-7cbbc43e-64db-408e-a117-94e7950ea2e2 req-3db904e0-128e-4664-bfc9-c9b6d6084043 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received unexpected event network-vif-plugged-b197187f-ff62-4584-bbfa-1eacf3b6d13a for instance with vm_state active and task_state None. 
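The exchange just above traces the external-event path end to end: Neutron reports network-vif-plugged for port b197187f, the compute manager takes the per-instance "-events" lock, tries to pop a matching waiter via pop_instance_event, finds none, and downgrades the event to the WARNING about an unexpected network-vif-plugged. A toy sketch of that pop-or-warn pattern follows; it assumes a plain dict of threading.Event waiters and is not Nova's actual InstanceEvents implementation.

```python
import threading


class InstanceEventWaiters:
    """Hypothetical stand-in for the waiter registry implied by the log:
    it only models the register / pop-or-warn behaviour."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # A build or attach path registers interest before the event arrives.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def handle_external_event(waiters, instance_uuid, event_name):
    waiter = waiters.pop(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to "No waiting events found dispatching ..." followed
        # by the WARNING about an unexpected event.
        print(f"unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()  # wakes whichever caller registered via prepare()
```

Here the warning is harmless: nothing was waiting for the event, so it is logged and dropped, and the interface attach carries on (the port update for b197187f succeeds in the very next entry).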
[ 1140.655528] env[68569]: DEBUG nova.network.neutron [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Successfully updated port: b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.750526] env[68569]: DEBUG nova.network.neutron [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Successfully created port: a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1140.783172] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.863127] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1140.883929] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 4c122cff-f64c-4e4f-9454-034c44ff246b] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1140.964041] env[68569]: DEBUG nova.network.neutron [-] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.093015] env[68569]: DEBUG oslo_concurrency.lockutils [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.241s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.093015] env[68569]: INFO nova.compute.manager [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Successfully reverted task state from resize_migrating on failure for instance. 
[ 1141.098360] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.196s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.098893] env[68569]: DEBUG nova.objects.instance [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'resources' on Instance uuid a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server [None req-031f9237-03a7-4e9c-8ead-a347ebed2a9d tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1141.103766] env[68569]: ERROR oslo_messaging.rpc.server [ 1141.115340] env[68569]: DEBUG nova.network.neutron [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1141.160901] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.160901] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.160901] env[68569]: DEBUG nova.network.neutron [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.243055] env[68569]: DEBUG nova.network.neutron [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating instance_info_cache with network_info: [{"id": "234f9512-640a-4c30-9ae1-166d66a910bc", "address": "fa:16:3e:77:b3:c7", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap234f9512-64", "ovs_interfaceid": "234f9512-640a-4c30-9ae1-166d66a910bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.327695] env[68569]: DEBUG nova.compute.manager [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Received event network-vif-plugged-234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1141.327921] env[68569]: DEBUG oslo_concurrency.lockutils [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] Acquiring lock "fdcdd4b5-82bd-43c9-8865-807f86789a99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.328187] env[68569]: DEBUG oslo_concurrency.lockutils [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.328362] env[68569]: DEBUG oslo_concurrency.lockutils [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.328532] env[68569]: DEBUG nova.compute.manager [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] No waiting events found dispatching network-vif-plugged-234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.328698] env[68569]: WARNING nova.compute.manager [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Received unexpected event network-vif-plugged-234f9512-640a-4c30-9ae1-166d66a910bc for instance with vm_state building and task_state spawning. [ 1141.328858] env[68569]: DEBUG nova.compute.manager [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Received event network-changed-234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1141.329023] env[68569]: DEBUG nova.compute.manager [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Refreshing instance network info cache due to event network-changed-234f9512-640a-4c30-9ae1-166d66a910bc. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1141.329346] env[68569]: DEBUG oslo_concurrency.lockutils [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] Acquiring lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.387368] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 671f7e6d-703e-48a9-8509-2a8924afe911] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1141.465547] env[68569]: INFO nova.compute.manager [-] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Took 1.28 seconds to deallocate network for instance. [ 1141.699492] env[68569]: WARNING nova.network.neutron [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] 01780a40-9441-415d-988a-24afe70ba382 already exists in list: networks containing: ['01780a40-9441-415d-988a-24afe70ba382']. 
ignoring it [ 1141.745748] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.746136] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Instance network_info: |[{"id": "234f9512-640a-4c30-9ae1-166d66a910bc", "address": "fa:16:3e:77:b3:c7", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap234f9512-64", "ovs_interfaceid": "234f9512-640a-4c30-9ae1-166d66a910bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.746811] env[68569]: DEBUG oslo_concurrency.lockutils [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] Acquired lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.746811] env[68569]: DEBUG nova.network.neutron [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Refreshing network info cache for port 234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.748217] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:b3:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '234f9512-640a-4c30-9ae1-166d66a910bc', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.756243] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating folder: Project (335f83fe0b8e42aa80e8f0691b609649). 
Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1141.760199] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-415a0b2e-37ce-4bee-a6ae-90de658ae519 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.763862] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93070c06-4402-4b44-aea9-e49444fb148c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.775457] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce046855-58ac-473f-9ce7-957c0db520cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.781026] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created folder: Project (335f83fe0b8e42aa80e8f0691b609649) in parent group-v633430. [ 1141.781440] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating folder: Instances. Parent ref: group-v633716. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1141.781541] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95fd435c-e756-4f8f-a7ef-be5fe49b4a0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.811933] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd32eac1-963e-4913-aa6c-2bd81880e176 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.819784] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ce0b28-f8df-4c18-b4ea-7ed272e210ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.824977] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created folder: Instances in parent group-v633716. [ 1141.825240] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.825750] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.825992] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9465a62-e99a-4e7e-9678-eb08174dfccf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.847548] env[68569]: DEBUG nova.compute.provider_tree [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.854646] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.854646] env[68569]: value = "task-3167774" [ 1141.854646] env[68569]: _type = "Task" [ 1141.854646] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.863875] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167774, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.871277] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1141.891195] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 4d61fcef-7e69-43bb-9d7b-c1e5b8c3dcbc] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1141.900704] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1141.900973] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1141.901148] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1141.901332] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1141.901477] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1141.901664] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1141.901868] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1141.902037] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c 
tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1141.902213] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1141.902374] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1141.902544] env[68569]: DEBUG nova.virt.hardware [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1141.903393] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20f2e0f-5473-4ba3-af2c-16a09b2cff43 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.914565] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dee503-75bd-4c2d-9041-84c7cf7fc42d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.972960] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.002808] env[68569]: DEBUG nova.network.neutron [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updated VIF entry in instance network info cache for port 234f9512-640a-4c30-9ae1-166d66a910bc. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.003234] env[68569]: DEBUG nova.network.neutron [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating instance_info_cache with network_info: [{"id": "234f9512-640a-4c30-9ae1-166d66a910bc", "address": "fa:16:3e:77:b3:c7", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap234f9512-64", "ovs_interfaceid": "234f9512-640a-4c30-9ae1-166d66a910bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.025622] env[68569]: DEBUG nova.network.neutron [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "address": "fa:16:3e:3b:6c:a5", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb197187f-ff", "ovs_interfaceid": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.351243] env[68569]: DEBUG nova.scheduler.client.report [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.365842] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167774, 'name': CreateVM_Task, 'duration_secs': 0.313295} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.366185] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.366995] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.367185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.368224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.368224] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51172f0c-301c-4756-b88f-6a2ed3346688 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.374582] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1142.374582] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520b1e19-c886-4a44-89a8-9efb6917c01b" [ 1142.374582] env[68569]: _type = "Task" [ 1142.374582] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.383439] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520b1e19-c886-4a44-89a8-9efb6917c01b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.396353] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7b10cfb4-dc0a-4311-a24f-7a25869ef594] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1142.429855] env[68569]: DEBUG nova.network.neutron [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Successfully updated port: a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.505611] env[68569]: DEBUG oslo_concurrency.lockutils [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] Releasing lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.505966] env[68569]: DEBUG nova.compute.manager [req-ee830d05-a5c8-43e7-adf5-ec752cf6519a req-0159ed91-de82-4410-b40e-ece522aaa704 service nova] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Received event network-vif-deleted-38c532c8-8ef0-4ec9-9ddb-aa70b4411d1e {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1142.528524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.529337] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.529524] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.530406] env[68569]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f117416-1aff-4d14-b26d-3278ca4c0197 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.548794] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1142.549066] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.549243] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1142.549430] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.549577] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1142.549725] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1142.549925] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1142.550093] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1142.550265] env[68569]: DEBUG nova.virt.hardware [None 
req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1142.550423] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1142.550589] env[68569]: DEBUG nova.virt.hardware [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1142.557526] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfiguring VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1142.557846] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e0beff6-832b-423f-b230-65a68ad3b3b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.576031] env[68569]: DEBUG oslo_vmware.api [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1142.576031] env[68569]: value = "task-3167775" [ 1142.576031] env[68569]: _type = "Task" [ 1142.576031] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.583876] env[68569]: DEBUG oslo_vmware.api [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167775, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.659325] env[68569]: DEBUG nova.compute.manager [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-changed-b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1142.659476] env[68569]: DEBUG nova.compute.manager [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing instance network info cache due to event network-changed-b197187f-ff62-4584-bbfa-1eacf3b6d13a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1142.659688] env[68569]: DEBUG oslo_concurrency.lockutils [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.659828] env[68569]: DEBUG oslo_concurrency.lockutils [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.659986] env[68569]: DEBUG nova.network.neutron [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing network info cache for port b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.860666] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.863180] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.354s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.865504] env[68569]: INFO nova.compute.claims [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1142.885960] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520b1e19-c886-4a44-89a8-9efb6917c01b, 'name': SearchDatastore_Task, 'duration_secs': 0.012889} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.887123] env[68569]: INFO nova.scheduler.client.report [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted allocations for instance a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3 [ 1142.888274] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.889336] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1142.889441] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.889542] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.889727] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.892189] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90fda834-3c53-442e-871b-b6b00f74c856 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.899033] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a7145443-aacb-4d9e-8e39-3741d0630849] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1142.902997] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.904027] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1142.904199] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aad2efcd-e064-4bdc-a9a9-588894316fec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.910545] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1142.910545] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527a4013-95cd-6e4e-0e0a-642b8caaec5f" [ 1142.910545] env[68569]: _type = "Task" [ 1142.910545] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.923876] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527a4013-95cd-6e4e-0e0a-642b8caaec5f, 'name': SearchDatastore_Task, 'duration_secs': 0.009613} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.924702] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad1622d5-7d75-44b0-82ee-58c284efbe23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.930784] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1142.930784] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529aaad8-dc5b-a799-07de-bc25de359aca" [ 1142.930784] env[68569]: _type = "Task" [ 1142.930784] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.934898] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.935041] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.935189] env[68569]: DEBUG nova.network.neutron [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.942384] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529aaad8-dc5b-a799-07de-bc25de359aca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.087457] env[68569]: DEBUG oslo_vmware.api [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167775, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.352674] env[68569]: DEBUG nova.compute.manager [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Received event network-vif-plugged-a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1143.352951] env[68569]: DEBUG oslo_concurrency.lockutils [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.353180] env[68569]: DEBUG oslo_concurrency.lockutils [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.353321] env[68569]: DEBUG oslo_concurrency.lockutils [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.353488] env[68569]: DEBUG nova.compute.manager [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] No waiting events found dispatching network-vif-plugged-a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1143.353650] env[68569]: WARNING nova.compute.manager [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Received unexpected event network-vif-plugged-a6dc900f-8486-4e42-9982-a87df2d989b9 for instance with vm_state building and task_state spawning. [ 1143.353809] env[68569]: DEBUG nova.compute.manager [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Received event network-changed-a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1143.353962] env[68569]: DEBUG nova.compute.manager [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Refreshing instance network info cache due to event network-changed-a6dc900f-8486-4e42-9982-a87df2d989b9. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1143.354141] env[68569]: DEBUG oslo_concurrency.lockutils [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] Acquiring lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.373519] env[68569]: DEBUG nova.network.neutron [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updated VIF entry in instance network info cache for port b197187f-ff62-4584-bbfa-1eacf3b6d13a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.373963] env[68569]: DEBUG nova.network.neutron [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "address": "fa:16:3e:3b:6c:a5", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb197187f-ff", "ovs_interfaceid": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1143.397450] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c7e56676-d874-4b09-8a4b-eee2d8a115fd tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.438s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.404992] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 1670f03a-94e2-4005-be7e-41aad61a8925] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1143.442258] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529aaad8-dc5b-a799-07de-bc25de359aca, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.442788] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.443052] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fdcdd4b5-82bd-43c9-8865-807f86789a99/fdcdd4b5-82bd-43c9-8865-807f86789a99.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.443507] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-173d9028-6d7a-4b82-8377-34a5ed6d1966 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.451598] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1143.451598] env[68569]: value = "task-3167776" [ 1143.451598] env[68569]: _type = "Task" [ 1143.451598] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.459904] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167776, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.469120] env[68569]: DEBUG nova.network.neutron [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.587358] env[68569]: DEBUG oslo_vmware.api [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167775, 'name': ReconfigVM_Task, 'duration_secs': 0.554833} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.587863] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.588119] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfigured VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1143.612397] env[68569]: DEBUG nova.network.neutron [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updating instance_info_cache with network_info: [{"id": "a6dc900f-8486-4e42-9982-a87df2d989b9", "address": "fa:16:3e:07:38:7a", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6dc900f-84", "ovs_interfaceid": "a6dc900f-8486-4e42-9982-a87df2d989b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.876686] env[68569]: DEBUG oslo_concurrency.lockutils [req-0d6c8b4e-9561-471b-aa91-918933af31fc req-07acd676-b18a-47fd-bf87-db3c356cc38c service nova] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.908020] env[68569]: DEBUG 
nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 705bbc03-30ac-4d5b-a3f0-6505171a69fc] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1143.962243] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167776, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474292} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.964671] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fdcdd4b5-82bd-43c9-8865-807f86789a99/fdcdd4b5-82bd-43c9-8865-807f86789a99.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1143.964883] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1143.965322] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f5cfb82-31ae-4a83-b468-7c00d52c6a56 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.972548] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1143.972548] env[68569]: value = "task-3167777" [ 1143.972548] env[68569]: _type = "Task" [ 1143.972548] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.986652] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167777, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.026763] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4514d14-9c5d-4985-8d53-b6aff3ae1f61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.035423] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bbba3a-869b-4aaf-88db-3730fe16b331 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.067974] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2eb92b-8d9a-456f-bbae-55c6d9efd509 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.076160] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c46a8eb-ddf9-47c4-9165-e700b4458b1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.089872] env[68569]: DEBUG nova.compute.provider_tree [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.091997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6997fd93-6187-45db-8f2e-dd44d9c48517 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.377s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.114664] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.115010] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Instance network_info: |[{"id": "a6dc900f-8486-4e42-9982-a87df2d989b9", "address": "fa:16:3e:07:38:7a", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": 
"nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6dc900f-84", "ovs_interfaceid": "a6dc900f-8486-4e42-9982-a87df2d989b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1144.115543] env[68569]: DEBUG oslo_concurrency.lockutils [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] Acquired lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.115731] env[68569]: DEBUG nova.network.neutron [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Refreshing network info cache for port a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.116831] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:38:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6dc900f-8486-4e42-9982-a87df2d989b9', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1144.124209] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.125149] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1144.125403] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6a2298b-e788-4e3b-a6d9-971ccc41c213 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.146537] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.146537] env[68569]: value = "task-3167778" [ 1144.146537] env[68569]: _type = "Task" [ 1144.146537] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.154801] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167778, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.411789] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 281d7077-391a-4cce-9d31-af41568a2b7c] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1144.485458] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071729} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.485712] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1144.486634] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35191be8-2b3a-4894-872b-45ca32f12035 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.511530] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] fdcdd4b5-82bd-43c9-8865-807f86789a99/fdcdd4b5-82bd-43c9-8865-807f86789a99.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.512762] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d2f6b20-054f-4dc3-a822-8a11297322eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.535388] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1144.535388] env[68569]: value = "task-3167779" [ 1144.535388] env[68569]: _type = "Task" [ 1144.535388] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.545140] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167779, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.594143] env[68569]: DEBUG nova.scheduler.client.report [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.659094] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167778, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.712293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.712581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.712777] env[68569]: INFO nova.compute.manager [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Shelving [ 1144.893744] env[68569]: DEBUG nova.network.neutron [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updated VIF entry in instance network info cache for port a6dc900f-8486-4e42-9982-a87df2d989b9. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1144.894112] env[68569]: DEBUG nova.network.neutron [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updating instance_info_cache with network_info: [{"id": "a6dc900f-8486-4e42-9982-a87df2d989b9", "address": "fa:16:3e:07:38:7a", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6dc900f-84", "ovs_interfaceid": "a6dc900f-8486-4e42-9982-a87df2d989b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.915108] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: ae92919c-f2eb-4231-afea-b23269e09a0a] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1145.046327] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167779, 'name': ReconfigVM_Task, 'duration_secs': 0.385995} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.046621] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Reconfigured VM instance instance-0000006c to attach disk [datastore2] fdcdd4b5-82bd-43c9-8865-807f86789a99/fdcdd4b5-82bd-43c9-8865-807f86789a99.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1145.047182] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c7d30df-113e-4efd-b043-402812f952ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.053935] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1145.053935] env[68569]: value = "task-3167780" [ 1145.053935] env[68569]: _type = "Task" [ 1145.053935] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.061561] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167780, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.099962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.100529] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1145.103592] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.320s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.103592] env[68569]: DEBUG nova.objects.instance [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'resources' on Instance uuid a9e87dfc-6e00-4e55-8a8f-bc3174b991da {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.159330] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167778, 'name': CreateVM_Task, 'duration_secs': 0.649521} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.160057] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1145.160186] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.160357] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.160740] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1145.160968] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b21cb0aa-cf31-4fbd-b71a-9557eea55841 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.167360] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1145.167360] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520adc66-103d-ad0f-5913-8bfc2ad837f4" [ 1145.167360] env[68569]: _type = "Task" [ 1145.167360] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.176777] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520adc66-103d-ad0f-5913-8bfc2ad837f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.396826] env[68569]: DEBUG oslo_concurrency.lockutils [req-75abd6d3-d93a-4307-9bb8-ee1e58914af5 req-5899f6e0-51ff-4a2d-a7f3-57e7d8e01afc service nova] Releasing lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.418585] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 1689f1f3-53f2-4c02-a969-e4dae21f14b7] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1145.564765] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167780, 'name': Rename_Task, 'duration_secs': 0.151028} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.565195] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.565399] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2f3f646-1ab3-4e3c-86d3-e39fbf6a4ccc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.573195] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1145.573195] env[68569]: value = "task-3167781" [ 1145.573195] env[68569]: _type = "Task" [ 1145.573195] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.582164] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167781, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.606781] env[68569]: DEBUG nova.compute.utils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1145.611581] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1145.611792] env[68569]: DEBUG nova.network.neutron [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1145.657374] env[68569]: DEBUG nova.policy [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '842e74e7139540d7a537eb8bd56bca78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e52d937c83d46daa36746494bd7ccbe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1145.678440] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520adc66-103d-ad0f-5913-8bfc2ad837f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013208} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.678640] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.678886] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1145.679191] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.679340] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.679521] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1145.679786] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0c593d8-053f-4163-841b-2489bb5c80b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.689187] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1145.689405] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1145.690181] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-027d6c33-3d23-4bf9-9553-1f5b48ee24fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.697742] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1145.697742] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52efbe1f-2a71-9a0a-c5be-6e71b686e1b8" [ 1145.697742] env[68569]: _type = "Task" [ 1145.697742] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.706925] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52efbe1f-2a71-9a0a-c5be-6e71b686e1b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.721671] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.724110] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d8057e5-9906-42a8-af5d-df10799dc887 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.732636] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1145.732636] env[68569]: value = "task-3167782" [ 1145.732636] env[68569]: _type = "Task" [ 1145.732636] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.744471] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.762387] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde39323-e600-4b63-852c-06f4925cd874 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.772711] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2900b2f8-37c4-4e03-8757-300e2ebcf88d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.805826] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-1c0c5e5b-c2d8-4845-8b34-e2356a2452df" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.806111] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-1c0c5e5b-c2d8-4845-8b34-e2356a2452df" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.806467] env[68569]: DEBUG nova.objects.instance [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'flavor' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.808693] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8d791b-3842-4699-8979-b168e1c67f47 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.818012] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2df89f3-60c2-45d5-9614-4af3f08396f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.834629] env[68569]: DEBUG nova.compute.provider_tree [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.924116] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 92e4fbfd-fef5-4c06-a9fd-fa15ea5fe948] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11854}} [ 1145.958604] env[68569]: DEBUG nova.network.neutron [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Successfully created port: 66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1146.084194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.084194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.089766] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167781, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.112102] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1146.210030] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52efbe1f-2a71-9a0a-c5be-6e71b686e1b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010965} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.210900] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0f29d02-9459-4fe0-883f-433cf7746be7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.216688] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1146.216688] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52371ee7-1013-b071-414e-4358d823d8b3" [ 1146.216688] env[68569]: _type = "Task" [ 1146.216688] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.224564] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52371ee7-1013-b071-414e-4358d823d8b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.241966] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167782, 'name': PowerOffVM_Task, 'duration_secs': 0.235597} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.242261] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1146.243040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4788b053-10fa-40b9-941e-35bca9bbff79 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.261548] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca30ac5e-bbc0-4b74-aedf-c51eeb4272f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.337097] env[68569]: DEBUG nova.scheduler.client.report [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.399762] env[68569]: DEBUG nova.objects.instance [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'pci_requests' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.425946] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 0dc5da15-4c10-4754-ac82-a130b933295d] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1146.584417] env[68569]: DEBUG oslo_vmware.api [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167781, 'name': PowerOnVM_Task, 'duration_secs': 0.527706} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.584417] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.584611] env[68569]: INFO nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 6.95 seconds to spawn the instance on the hypervisor. [ 1146.584704] env[68569]: DEBUG nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.585472] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a73425f-5888-4698-87ce-d881827ea983 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.588294] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1146.728411] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52371ee7-1013-b071-414e-4358d823d8b3, 'name': SearchDatastore_Task, 'duration_secs': 0.01155} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.728641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.728986] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 5de9a459-a2a2-4d78-9a66-cf819e8893b6/5de9a459-a2a2-4d78-9a66-cf819e8893b6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1146.729289] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c76d77c3-253c-4132-98a6-3c176632f3dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.738061] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1146.738061] env[68569]: value = "task-3167783" [ 1146.738061] env[68569]: _type = "Task" [ 1146.738061] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.747298] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.771748] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1146.772129] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b8399212-2317-4832-aee5-1131311aa960 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.781453] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1146.781453] env[68569]: value = "task-3167784" [ 1146.781453] env[68569]: _type = "Task" [ 1146.781453] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.844817] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.741s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.847530] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.875s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.847761] env[68569]: DEBUG nova.objects.instance [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid fca05228-2a17-4a7e-94a0-449ba74a8933 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.865696] env[68569]: INFO nova.scheduler.client.report [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted allocations for instance a9e87dfc-6e00-4e55-8a8f-bc3174b991da [ 1146.902887] env[68569]: DEBUG nova.objects.base [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1146.903133] env[68569]: DEBUG nova.network.neutron [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1146.929248] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: adc7f255-be88-4eda-be25-f9ecc9d9bf99] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1146.976633] env[68569]: DEBUG nova.policy [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1147.109041] env[68569]: INFO nova.compute.manager [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 12.07 seconds to build 
instance. [ 1147.121159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.123153] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1147.159154] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1147.159634] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1147.159955] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1147.160388] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1147.160608] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1147.160774] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1147.160998] env[68569]: DEBUG 
nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1147.161184] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1147.161360] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1147.161527] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1147.161699] env[68569]: DEBUG nova.virt.hardware [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1147.163144] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd43dc11-3fb9-4135-b94d-e273ac60134a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.174738] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0380d30b-1c54-4393-9722-e5f1d7b0ed14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.250927] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488929} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.251197] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 5de9a459-a2a2-4d78-9a66-cf819e8893b6/5de9a459-a2a2-4d78-9a66-cf819e8893b6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1147.251420] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1147.251685] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-229eff5f-d625-44a0-a701-6889be00b44d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.259577] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1147.259577] env[68569]: value = "task-3167785" [ 1147.259577] env[68569]: _type = "Task" [ 1147.259577] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.268144] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167785, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.292737] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167784, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.375212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6aefdc55-c650-4cf8-84b3-d80a56dd8f4e tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "a9e87dfc-6e00-4e55-8a8f-bc3174b991da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.170s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.401692] env[68569]: DEBUG nova.compute.manager [req-7a97f67b-02ff-409f-9447-e429268f1795 req-92fda904-7baf-480a-9d6b-a4628025c0bb service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Received event network-vif-plugged-66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1147.401915] env[68569]: DEBUG oslo_concurrency.lockutils [req-7a97f67b-02ff-409f-9447-e429268f1795 req-92fda904-7baf-480a-9d6b-a4628025c0bb service nova] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.402145] env[68569]: DEBUG oslo_concurrency.lockutils [req-7a97f67b-02ff-409f-9447-e429268f1795 req-92fda904-7baf-480a-9d6b-a4628025c0bb service nova] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.402315] env[68569]: DEBUG oslo_concurrency.lockutils [req-7a97f67b-02ff-409f-9447-e429268f1795 req-92fda904-7baf-480a-9d6b-a4628025c0bb service nova] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.402483] env[68569]: DEBUG nova.compute.manager [req-7a97f67b-02ff-409f-9447-e429268f1795 req-92fda904-7baf-480a-9d6b-a4628025c0bb service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] No waiting events found dispatching network-vif-plugged-66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1147.402649] env[68569]: WARNING nova.compute.manager [req-7a97f67b-02ff-409f-9447-e429268f1795 req-92fda904-7baf-480a-9d6b-a4628025c0bb service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Received unexpected event network-vif-plugged-66a22ce3-2444-47fc-aaf8-741e718c05f0 for instance with vm_state building and task_state spawning. 
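The repeated "Acquiring lock ... by ...", "acquired ... :: waited Ns" and ""released" ... :: held Ns" DEBUG lines in the surrounding entries are emitted by oslo.concurrency's lockutils wrapper (the `inner` function referenced at lockutils.py:405/410/424). A minimal illustrative sketch of that pattern follows; it is not taken from Nova's source and assumes only the public lockutils API, with `update_usage` as a hypothetical placeholder for the guarded code:

    # Illustrative sketch only: the locking pattern whose DEBUG output appears in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')   # same in-process lock name seen in the log
    def update_usage():
        # On entry/exit of this body, lockutils logs "Acquiring lock ...",
        # "acquired ... :: waited Ns" and '"released" ... :: held Ns'.
        pass

    # The same primitive is usable directly as a context manager:
    with lockutils.lock('compute_resources'):
        pass

The wait/hold durations printed in the log (e.g. "waited 4.875s", "held 1.741s") are measured by that wrapper around the critical section, which is why long hold times on "compute_resources" show up directly as queueing delays for the next claim.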
[ 1147.433458] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7b95aece-35db-4eab-b221-c5eccd749eae] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1147.494958] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b802c089-7321-436b-ae0e-6d54385418d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.504408] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643ec544-b67c-453e-ba29-502b4aa4192c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.536814] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a55ab72-1281-4231-8845-6a500e7658f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.545622] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc15dc1-456e-4065-ae30-b50e37cc9fe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.562022] env[68569]: DEBUG nova.compute.provider_tree [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.605888] env[68569]: DEBUG nova.network.neutron [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Successfully updated port: 66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1147.611794] env[68569]: DEBUG oslo_concurrency.lockutils [None req-65f0628f-1b87-44a0-9598-ba7b0e2343a2 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.585s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.770011] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07571} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.770365] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1147.771158] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11919c07-0c38-4310-86fd-4fe3f1ed67cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.794278] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 5de9a459-a2a2-4d78-9a66-cf819e8893b6/5de9a459-a2a2-4d78-9a66-cf819e8893b6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1147.797499] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2350d65a-c396-4878-940f-b993fc2aaeaf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.819350] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167784, 'name': CreateSnapshot_Task, 'duration_secs': 0.798531} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.822842] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1147.822842] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb46e628-3c56-4200-aec2-9572cba1b907 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.824856] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1147.824856] env[68569]: value = "task-3167786" [ 1147.824856] env[68569]: _type = "Task" [ 1147.824856] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.840214] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167786, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.936930] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: f8b56e81-f3ef-489b-a64c-be687cf99fd1] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1148.065654] env[68569]: DEBUG nova.scheduler.client.report [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.108883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.109087] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.109246] env[68569]: DEBUG nova.network.neutron [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.337542] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167786, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.347992] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1148.348337] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-12be43fc-3b49-48ed-8449-f484cbc379ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.357878] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1148.357878] env[68569]: value = "task-3167787" [ 1148.357878] env[68569]: _type = "Task" [ 1148.357878] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.381020] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167787, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.440645] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: e9994248-0240-412b-9e60-a04b00e5c0cd] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1148.558853] env[68569]: DEBUG nova.compute.manager [req-1b6a3e10-3ca3-44bf-aa6f-c9b09bb16b83 req-281ccb90-afa6-47b6-b302-c21b0da637df service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-vif-plugged-1c0c5e5b-c2d8-4845-8b34-e2356a2452df {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1148.559547] env[68569]: DEBUG oslo_concurrency.lockutils [req-1b6a3e10-3ca3-44bf-aa6f-c9b09bb16b83 req-281ccb90-afa6-47b6-b302-c21b0da637df service nova] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.560622] env[68569]: DEBUG oslo_concurrency.lockutils [req-1b6a3e10-3ca3-44bf-aa6f-c9b09bb16b83 req-281ccb90-afa6-47b6-b302-c21b0da637df service nova] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.560989] env[68569]: DEBUG oslo_concurrency.lockutils [req-1b6a3e10-3ca3-44bf-aa6f-c9b09bb16b83 req-281ccb90-afa6-47b6-b302-c21b0da637df service nova] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.561149] env[68569]: DEBUG nova.compute.manager 
[req-1b6a3e10-3ca3-44bf-aa6f-c9b09bb16b83 req-281ccb90-afa6-47b6-b302-c21b0da637df service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] No waiting events found dispatching network-vif-plugged-1c0c5e5b-c2d8-4845-8b34-e2356a2452df {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1148.561309] env[68569]: WARNING nova.compute.manager [req-1b6a3e10-3ca3-44bf-aa6f-c9b09bb16b83 req-281ccb90-afa6-47b6-b302-c21b0da637df service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received unexpected event network-vif-plugged-1c0c5e5b-c2d8-4845-8b34-e2356a2452df for instance with vm_state active and task_state None. [ 1148.570641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.572984] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.452s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.574479] env[68569]: INFO nova.compute.claims [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1148.588165] env[68569]: INFO nova.scheduler.client.report [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance fca05228-2a17-4a7e-94a0-449ba74a8933 [ 1148.648228] env[68569]: DEBUG nova.network.neutron [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1148.673313] env[68569]: DEBUG nova.network.neutron [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Successfully updated port: 1c0c5e5b-c2d8-4845-8b34-e2356a2452df {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1148.810481] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.810805] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.836932] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167786, 'name': ReconfigVM_Task, 'duration_secs': 0.852088} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.838143] env[68569]: DEBUG nova.network.neutron [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updating instance_info_cache with network_info: [{"id": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "address": "fa:16:3e:69:0a:26", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a22ce3-24", "ovs_interfaceid": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.839397] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 
tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 5de9a459-a2a2-4d78-9a66-cf819e8893b6/5de9a459-a2a2-4d78-9a66-cf819e8893b6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1148.840289] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2d47563-55a3-4952-9542-453881bdb87e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.849139] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1148.849139] env[68569]: value = "task-3167788" [ 1148.849139] env[68569]: _type = "Task" [ 1148.849139] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.860291] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167788, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.870090] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167787, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.944187] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6b2120d3-2e4b-4d1b-8109-6513b3b320eb] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1149.098703] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d91cbe7c-bc11-4af9-9baa-812ac620af22 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "fca05228-2a17-4a7e-94a0-449ba74a8933" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.024s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.175710] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.176027] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.176227] env[68569]: DEBUG nova.network.neutron [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 
ee188712-b0e0-44ee-80b4-be72da32299f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.313497] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1149.341710] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.342085] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Instance network_info: |[{"id": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "address": "fa:16:3e:69:0a:26", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a22ce3-24", "ovs_interfaceid": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1149.342540] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:0a:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66a22ce3-2444-47fc-aaf8-741e718c05f0', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.350444] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.350774] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.354013] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b4843cd-531b-4596-9dcc-003288b36581 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.379077] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167788, 'name': Rename_Task, 'duration_secs': 0.265504} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.382478] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.382776] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167787, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.383973] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-100b26b5-0e57-43a3-91a1-3d563d2c0f68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.385792] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.385792] env[68569]: value = "task-3167789" [ 1149.385792] env[68569]: _type = "Task" [ 1149.385792] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.391656] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1149.391656] env[68569]: value = "task-3167790" [ 1149.391656] env[68569]: _type = "Task" [ 1149.391656] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.398167] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167789, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.426139] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167790, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.448809] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 2cf8803a-8078-4832-a736-330d6bcde6ce] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1149.455530] env[68569]: DEBUG nova.compute.manager [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Received event network-changed-66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1149.455869] env[68569]: DEBUG nova.compute.manager [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Refreshing instance network info cache due to event network-changed-66a22ce3-2444-47fc-aaf8-741e718c05f0. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1149.456429] env[68569]: DEBUG oslo_concurrency.lockutils [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] Acquiring lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.456645] env[68569]: DEBUG oslo_concurrency.lockutils [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] Acquired lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.456862] env[68569]: DEBUG nova.network.neutron [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Refreshing network info cache for port 66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.725201] env[68569]: WARNING nova.network.neutron [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] 01780a40-9441-415d-988a-24afe70ba382 already exists in list: networks containing: ['01780a40-9441-415d-988a-24afe70ba382']. ignoring it [ 1149.725498] env[68569]: WARNING nova.network.neutron [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] 01780a40-9441-415d-988a-24afe70ba382 already exists in list: networks containing: ['01780a40-9441-415d-988a-24afe70ba382']. 
ignoring it [ 1149.751613] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e5233c-2d87-4df2-a0bd-e67fb4f497c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.761373] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3492bfff-54f4-4901-aa66-a3d59d764c42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.799577] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b54d7ca-360b-4275-999e-343605436936 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.816369] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecf38fe-05f1-4eb1-bca6-20bf7fffd881 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.837306] env[68569]: DEBUG nova.compute.provider_tree [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.843199] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.881701] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167787, 'name': CloneVM_Task} progress is 95%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.903220] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167789, 'name': CreateVM_Task, 'duration_secs': 0.462545} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.904132] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1149.905071] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.905220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.905548] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1149.905838] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bf75404-7b1a-4838-ad7b-45ff12798b63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.913017] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167790, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.918526] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1149.918526] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5220b8fb-d551-ce75-1014-8b401d61df70" [ 1149.918526] env[68569]: _type = "Task" [ 1149.918526] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.930310] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5220b8fb-d551-ce75-1014-8b401d61df70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.952335] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c0ea0ef8-93c2-416a-8caa-a51f7a39627e] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1150.223343] env[68569]: DEBUG nova.network.neutron [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "address": "fa:16:3e:3b:6c:a5", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb197187f-ff", "ovs_interfaceid": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "address": "fa:16:3e:5e:77:17", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0c5e5b-c2", "ovs_interfaceid": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.292061] env[68569]: DEBUG nova.network.neutron [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updated VIF entry in instance network info cache for port 66a22ce3-2444-47fc-aaf8-741e718c05f0. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.292441] env[68569]: DEBUG nova.network.neutron [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updating instance_info_cache with network_info: [{"id": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "address": "fa:16:3e:69:0a:26", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a22ce3-24", "ovs_interfaceid": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.341043] env[68569]: DEBUG nova.scheduler.client.report [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.380502] env[68569]: DEBUG oslo_vmware.api [None 
req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167787, 'name': CloneVM_Task, 'duration_secs': 1.844612} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.380781] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Created linked-clone VM from snapshot [ 1150.381535] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef42442-b6cd-42f2-8458-20db1f08c1de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.389840] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Uploading image f6c1f108-99b5-4bc9-80cd-889b31900043 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1150.404487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.404697] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.404905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.405085] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.405260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.406837] env[68569]: DEBUG oslo_vmware.api [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167790, 'name': PowerOnVM_Task, 'duration_secs': 0.765394} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.407094] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.407298] env[68569]: INFO nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Took 8.54 seconds to spawn the instance on the hypervisor. [ 1150.407465] env[68569]: DEBUG nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.407962] env[68569]: INFO nova.compute.manager [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Terminating instance [ 1150.409695] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b269dbdf-7b03-4080-91b7-433d5ff8722e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.423018] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1150.423018] env[68569]: value = "vm-633721" [ 1150.423018] env[68569]: _type = "VirtualMachine" [ 1150.423018] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1150.423822] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5cd1ec15-a27d-4a8e-874f-808b8a90d235 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.436366] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5220b8fb-d551-ce75-1014-8b401d61df70, 'name': SearchDatastore_Task, 'duration_secs': 0.016814} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.438142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.438259] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1150.438419] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.438581] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.439064] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.439115] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease: (returnval){ [ 1150.439115] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52846338-a6a8-40fd-302c-009f945500c0" [ 1150.439115] env[68569]: _type = "HttpNfcLease" [ 1150.439115] env[68569]: } obtained for exporting VM: (result){ [ 1150.439115] env[68569]: value = "vm-633721" [ 1150.439115] env[68569]: _type = "VirtualMachine" [ 1150.439115] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1150.439359] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the lease: (returnval){ [ 1150.439359] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52846338-a6a8-40fd-302c-009f945500c0" [ 1150.439359] env[68569]: _type = "HttpNfcLease" [ 1150.439359] env[68569]: } to be ready. 
{{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1150.439702] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-677b973e-6357-4c08-beee-c10c71f65324 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.451226] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1150.451226] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52846338-a6a8-40fd-302c-009f945500c0" [ 1150.451226] env[68569]: _type = "HttpNfcLease" [ 1150.451226] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1150.451664] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1150.451664] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52846338-a6a8-40fd-302c-009f945500c0" [ 1150.451664] env[68569]: _type = "HttpNfcLease" [ 1150.451664] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1150.452133] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61615a9a-739c-42e7-a074-ed44fa7a8fcc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.455835] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7696390d-a097-4b6d-827d-92f3165a4188] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1150.457505] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.458261] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1150.459511] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-820ffb88-6693-44c7-9f5b-ecf91b640299 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.465504] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b40803-491c-16e0-8a9b-674cb21b8ccb/disk-0.vmdk from lease info. 
{{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1150.465679] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b40803-491c-16e0-8a9b-674cb21b8ccb/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1150.530309] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1150.530309] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526ebb8a-4694-8018-8c8f-cb02eb3e35fb" [ 1150.530309] env[68569]: _type = "Task" [ 1150.530309] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.542755] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526ebb8a-4694-8018-8c8f-cb02eb3e35fb, 'name': SearchDatastore_Task, 'duration_secs': 0.013048} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.543576] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d717a83b-064b-4b23-8674-482871275548 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.552309] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1150.552309] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5214ec12-bb40-870a-d9fc-6a0138472a60" [ 1150.552309] env[68569]: _type = "Task" [ 1150.552309] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.561961] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5214ec12-bb40-870a-d9fc-6a0138472a60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.605544] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a6689ac2-cbbf-4ae1-9303-7fdf372c7461 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.726751] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.727586] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.727743] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.729122] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c575901-568b-48f4-bb3e-2d60e909ed23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.748783] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1150.749045] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1150.749228] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1150.749444] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1150.749624] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1150.749775] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1150.749983] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1150.750202] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1150.750410] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1150.750589] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1150.750793] env[68569]: DEBUG nova.virt.hardware [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1150.757111] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfiguring VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1150.757457] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-941b0c10-21b6-4c46-80e7-e85ba57e21f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.776284] env[68569]: DEBUG oslo_vmware.api [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1150.776284] env[68569]: value = "task-3167792" [ 1150.776284] env[68569]: _type = "Task" [ 1150.776284] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.787314] env[68569]: DEBUG oslo_vmware.api [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167792, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.795110] env[68569]: DEBUG oslo_concurrency.lockutils [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] Releasing lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.795505] env[68569]: DEBUG nova.compute.manager [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Received event network-changed-234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1150.795700] env[68569]: DEBUG nova.compute.manager [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Refreshing instance network info cache due to event network-changed-234f9512-640a-4c30-9ae1-166d66a910bc. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1150.795957] env[68569]: DEBUG oslo_concurrency.lockutils [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] Acquiring lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.796132] env[68569]: DEBUG oslo_concurrency.lockutils [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] Acquired lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.796385] env[68569]: DEBUG nova.network.neutron [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Refreshing network info cache for port 234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.845733] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.846385] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1150.849322] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.006s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.851979] env[68569]: INFO nova.compute.claims [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1150.870676] env[68569]: DEBUG nova.compute.manager [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-changed-1c0c5e5b-c2d8-4845-8b34-e2356a2452df {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1150.870676] env[68569]: DEBUG nova.compute.manager [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing instance network info cache due to event network-changed-1c0c5e5b-c2d8-4845-8b34-e2356a2452df. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1150.870849] env[68569]: DEBUG oslo_concurrency.lockutils [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.870987] env[68569]: DEBUG oslo_concurrency.lockutils [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.871252] env[68569]: DEBUG nova.network.neutron [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Refreshing network info cache for port 1c0c5e5b-c2d8-4845-8b34-e2356a2452df {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.916053] env[68569]: DEBUG nova.compute.manager [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1150.916363] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1150.917222] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c515c8a-4bfb-45a4-ab36-e133b6e4e589 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.925685] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.930354] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac232483-34db-483a-ae8f-770793354c40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.934394] env[68569]: INFO nova.compute.manager [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Took 14.88 seconds to build instance. [ 1150.942522] env[68569]: DEBUG oslo_vmware.api [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1150.942522] env[68569]: value = "task-3167793" [ 1150.942522] env[68569]: _type = "Task" [ 1150.942522] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.953920] env[68569]: DEBUG oslo_vmware.api [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.959227] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c0211ed8-5995-48f4-b339-99bd4c93254c] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1151.065602] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5214ec12-bb40-870a-d9fc-6a0138472a60, 'name': SearchDatastore_Task, 'duration_secs': 0.015101} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.065906] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.067224] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 09f4018b-f1cd-4726-b871-b110a7cf1b43/09f4018b-f1cd-4726-b871-b110a7cf1b43.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1151.067224] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65bd57fb-85c6-4f34-b1f4-4ee34115fe87 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.075382] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1151.075382] env[68569]: value = "task-3167794" [ 1151.075382] env[68569]: _type = "Task" [ 1151.075382] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.085469] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.290488] env[68569]: DEBUG oslo_vmware.api [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167792, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.359191] env[68569]: DEBUG nova.compute.utils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1151.365199] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1151.365199] env[68569]: DEBUG nova.network.neutron [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1151.437352] env[68569]: DEBUG oslo_concurrency.lockutils [None req-dc656f68-4a83-49af-baf4-6a2953cf3a8c tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.395s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.458200] env[68569]: DEBUG oslo_vmware.api [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167793, 'name': PowerOffVM_Task, 'duration_secs': 0.304447} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.459232] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.459578] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.460334] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33ab231b-7f5e-4660-bc30-3b36d716e87c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.466309] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 3551627b-9c90-43ea-bae7-d186eaa53c6b] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1151.505417] env[68569]: DEBUG nova.policy [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54ebbdfe9bfb4854a40b07d60c7a9efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f361997374e943cfa7a8e4e4884d6c65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1151.593025] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167794, 'name': CopyVirtualDisk_Task} 
progress is 51%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.601402] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1151.601402] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1151.602165] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleting the datastore file [datastore2] f5f8b054-7ee4-40f5-84de-1cee02949cd2 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.602718] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2003f03-21cf-4ad5-b5fb-16ac9a16e441 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.613984] env[68569]: DEBUG oslo_vmware.api [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for the task: (returnval){ [ 1151.613984] env[68569]: value = "task-3167796" [ 1151.613984] env[68569]: _type = "Task" [ 1151.613984] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.635039] env[68569]: DEBUG oslo_vmware.api [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.724516] env[68569]: DEBUG nova.network.neutron [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updated VIF entry in instance network info cache for port 234f9512-640a-4c30-9ae1-166d66a910bc. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.725137] env[68569]: DEBUG nova.network.neutron [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating instance_info_cache with network_info: [{"id": "234f9512-640a-4c30-9ae1-166d66a910bc", "address": "fa:16:3e:77:b3:c7", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap234f9512-64", "ovs_interfaceid": "234f9512-640a-4c30-9ae1-166d66a910bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.756104] env[68569]: DEBUG nova.compute.manager [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Received event network-changed-a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1151.758320] env[68569]: DEBUG nova.compute.manager [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Refreshing instance network info cache due to event network-changed-a6dc900f-8486-4e42-9982-a87df2d989b9. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1151.758320] env[68569]: DEBUG oslo_concurrency.lockutils [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] Acquiring lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.758320] env[68569]: DEBUG oslo_concurrency.lockutils [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] Acquired lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.758320] env[68569]: DEBUG nova.network.neutron [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Refreshing network info cache for port a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.789703] env[68569]: DEBUG oslo_vmware.api [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167792, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.856280] env[68569]: DEBUG nova.network.neutron [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updated VIF entry in instance network info cache for port 1c0c5e5b-c2d8-4845-8b34-e2356a2452df. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.856892] env[68569]: DEBUG nova.network.neutron [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "address": "fa:16:3e:3b:6c:a5", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb197187f-ff", "ovs_interfaceid": "b197187f-ff62-4584-bbfa-1eacf3b6d13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "address": "fa:16:3e:5e:77:17", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0c5e5b-c2", "ovs_interfaceid": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.865489] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1151.971338] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 367f4fe5-ffef-45f3-b00e-a5cf0418d5cd] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1151.988079] env[68569]: DEBUG nova.network.neutron [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Successfully created port: 2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1152.062218] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5852df06-4d5c-45ed-889d-df3c81953544 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.070609] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6634660e-95c3-437a-88fc-2ca2b828b648 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.108287] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb045b1-320d-4955-9480-de72d3ad2932 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.114177] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167794, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643284} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.114913] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 09f4018b-f1cd-4726-b871-b110a7cf1b43/09f4018b-f1cd-4726-b871-b110a7cf1b43.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1152.115238] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1152.115518] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a39bc9e-b79d-4a6a-8112-e1d6793fb667 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.123931] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb7c716-48e2-437e-9b5a-8850ef3ba4db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.129365] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1152.129365] env[68569]: value = "task-3167797" [ 1152.129365] env[68569]: _type = "Task" [ 1152.129365] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.141914] env[68569]: DEBUG nova.compute.provider_tree [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.147074] env[68569]: DEBUG oslo_vmware.api [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Task: {'id': task-3167796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245912} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.147612] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.147888] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.148171] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.148377] env[68569]: INFO nova.compute.manager [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1152.148704] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.151970] env[68569]: DEBUG nova.compute.manager [-] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.151970] env[68569]: DEBUG nova.network.neutron [-] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.153845] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167797, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.231490] env[68569]: DEBUG oslo_concurrency.lockutils [req-262a1c0c-7428-41d0-bc83-219b5a5ad7ce req-9a53d67d-cc27-4b46-8f44-90f0a505f4c6 service nova] Releasing lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.291665] env[68569]: DEBUG oslo_vmware.api [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167792, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.359715] env[68569]: DEBUG oslo_concurrency.lockutils [req-25d3d6c5-78d8-4f1e-a29a-288b4158356f req-2e6fb098-81c3-4e09-8136-a7e93eddc099 service nova] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.477421] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 0943cfd5-33fb-4b02-9e4d-93f18385bdae] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1152.548223] env[68569]: DEBUG nova.network.neutron [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updated VIF entry in instance network info cache for port a6dc900f-8486-4e42-9982-a87df2d989b9. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.548758] env[68569]: DEBUG nova.network.neutron [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updating instance_info_cache with network_info: [{"id": "a6dc900f-8486-4e42-9982-a87df2d989b9", "address": "fa:16:3e:07:38:7a", "network": {"id": "9b574e65-367b-4135-8ce6-e40fd22bc9f3", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1543341439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "713d88f021794769a64eef3807ade9be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6dc900f-84", "ovs_interfaceid": "a6dc900f-8486-4e42-9982-a87df2d989b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.641861] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074708} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.642235] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1152.643253] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441c96e1-a97a-43d5-a0db-8bcb0f4a1696 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.649305] env[68569]: DEBUG nova.scheduler.client.report [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1152.669881] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 09f4018b-f1cd-4726-b871-b110a7cf1b43/09f4018b-f1cd-4726-b871-b110a7cf1b43.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.671144] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc5756bc-6f61-4ed7-b6c1-772d790e4546 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.694413] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1152.694413] env[68569]: value = "task-3167798" [ 1152.694413] env[68569]: _type = "Task" [ 1152.694413] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.704689] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.791298] env[68569]: DEBUG oslo_vmware.api [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167792, 'name': ReconfigVM_Task, 'duration_secs': 1.829723} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.792797] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.792797] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfigured VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1152.880329] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1152.909583] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1152.909841] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1152.910042] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1152.910256] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1152.910403] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1152.910551] 
env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1152.910761] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1152.910918] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1152.911157] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1152.911340] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1152.911515] env[68569]: DEBUG nova.virt.hardware [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1152.912487] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbc4a36-e131-4854-a5bf-d5db4b016d7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.921451] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e420f2-d3f3-4a2d-a64e-bda910e95adc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.963583] env[68569]: DEBUG nova.network.neutron [-] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.982309] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 559a1eff-8892-4dda-a540-4a053ae0ef2b] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1153.052722] env[68569]: DEBUG oslo_concurrency.lockutils [req-c119ffc2-d07e-4adb-ab85-62531a025c17 req-3d827edb-4663-40ba-b028-10b515f376e6 service nova] Releasing lock "refresh_cache-5de9a459-a2a2-4d78-9a66-cf819e8893b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
1153.172142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.172654] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1153.207968] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167798, 'name': ReconfigVM_Task, 'duration_secs': 0.397274} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.207968] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 09f4018b-f1cd-4726-b871-b110a7cf1b43/09f4018b-f1cd-4726-b871-b110a7cf1b43.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.207968] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f592eab-1a62-49ae-a1b6-f023d75efdea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.216759] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1153.216759] env[68569]: value = "task-3167799" [ 1153.216759] env[68569]: _type = "Task" [ 1153.216759] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.227098] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167799, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.297604] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ecdc1a4b-e7f4-4411-8c74-52bf6d292d5e tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-1c0c5e5b-c2d8-4845-8b34-e2356a2452df" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.491s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.466599] env[68569]: INFO nova.compute.manager [-] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Took 1.31 seconds to deallocate network for instance. 
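[Editor's note] The ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same cycle: the driver submits a vCenter task, then oslo.vmware polls it ("Waiting for the task ... progress is N% ... completed successfully"). The sketch below is a minimal, self-contained illustration of that poll loop only; FakeTask, its poll() method and this wait_for_task() are hypothetical stand-ins, not the oslo.vmware API.

import time

# Hypothetical stand-in for a vCenter task handle; the real driver gets a
# managed object reference back from calls such as ExtendVirtualDisk_Task or
# PowerOnVM_Task and hands it to oslo.vmware, which polls TaskInfo instead.
class FakeTask:
    def __init__(self, steps):
        self._progress = iter(steps)

    def poll(self):
        # Returns (state, progress); purely illustrative.
        try:
            return "running", next(self._progress)
        except StopIteration:
            return "success", 100


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it completes, mirroring the 'progress is N% ...
    completed successfully' cycle seen in the log (illustrative only)."""
    while True:
        state, progress = task.poll()
        print(f"Task progress is {progress}%.")
        if state == "success":
            print("Task completed successfully.")
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask([0, 5, 99]), poll_interval=0.01)
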
[ 1153.488588] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a591b671-ca84-47b5-9831-63478d55fd07] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1153.563346] env[68569]: DEBUG nova.network.neutron [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Successfully updated port: 2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1153.678164] env[68569]: DEBUG nova.compute.utils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1153.679764] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1153.679950] env[68569]: DEBUG nova.network.neutron [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1153.729102] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167799, 'name': Rename_Task, 'duration_secs': 0.158087} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.730630] env[68569]: DEBUG nova.policy [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73b1c309d1494888945f033a8c5140a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa0ab47201c64b0d87480d4ff90014f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1153.732202] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.732468] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1353930f-f696-4e75-8073-6afe4fffaddb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.740667] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1153.740667] env[68569]: value = "task-3167800" [ 1153.740667] env[68569]: _type = "Task" [ 1153.740667] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.749448] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167800, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.788682] env[68569]: DEBUG nova.compute.manager [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Received event network-vif-deleted-c350f401-6619-4da6-83e4-c0650e2cfcb6 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1153.788923] env[68569]: DEBUG nova.compute.manager [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Received event network-vif-plugged-2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1153.789203] env[68569]: DEBUG oslo_concurrency.lockutils [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] Acquiring lock "3363dac8-c5df-405e-8bdc-9002e2d45e05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.789467] env[68569]: DEBUG oslo_concurrency.lockutils [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.789709] env[68569]: DEBUG oslo_concurrency.lockutils [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.789915] env[68569]: DEBUG nova.compute.manager [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] No waiting events found dispatching network-vif-plugged-2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.790131] env[68569]: WARNING nova.compute.manager [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Received unexpected event network-vif-plugged-2e2d14d6-2ae7-45de-a9ee-885c117167ee for instance with vm_state building and task_state spawning. [ 1153.790381] env[68569]: DEBUG nova.compute.manager [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Received event network-changed-2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1153.790567] env[68569]: DEBUG nova.compute.manager [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Refreshing instance network info cache due to event network-changed-2e2d14d6-2ae7-45de-a9ee-885c117167ee. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1153.790774] env[68569]: DEBUG oslo_concurrency.lockutils [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] Acquiring lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.790951] env[68569]: DEBUG oslo_concurrency.lockutils [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] Acquired lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.791164] env[68569]: DEBUG nova.network.neutron [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Refreshing network info cache for port 2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.974183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.974575] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.975208] env[68569]: DEBUG nova.objects.instance [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lazy-loading 'resources' on Instance uuid f5f8b054-7ee4-40f5-84de-1cee02949cd2 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.992333] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: dcb7e6c2-62c6-4897-8cbf-f60f78ae5e53] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1154.068461] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.171580] env[68569]: DEBUG nova.network.neutron [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Successfully created port: 5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1154.184273] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 
47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1154.252834] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167800, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.363010] env[68569]: DEBUG nova.network.neutron [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1154.436491] env[68569]: DEBUG nova.network.neutron [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.496312] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: f84204a9-aeea-498e-9682-298e581b34e3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1154.636014] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff07441-6893-437d-b0da-f1a43220ec18 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.644269] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a963152-6312-4f84-946d-27898f6b642b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.677286] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfb8c40-1925-4b4c-98ea-2239388fc4aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.685830] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4349c238-1ea9-46da-b564-5b04d57c6677 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.703886] env[68569]: DEBUG nova.compute.provider_tree [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.753061] env[68569]: DEBUG oslo_vmware.api [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167800, 'name': PowerOnVM_Task, 'duration_secs': 0.53763} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.753385] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1154.753585] env[68569]: INFO nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Took 7.63 seconds to spawn the instance on the hypervisor. [ 1154.753775] env[68569]: DEBUG nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1154.754608] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc8789f-d203-4b41-b2a3-5f978502d31f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.764464] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-b197187f-ff62-4584-bbfa-1eacf3b6d13a" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.764800] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-b197187f-ff62-4584-bbfa-1eacf3b6d13a" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.939236] env[68569]: DEBUG oslo_concurrency.lockutils [req-b54b8080-1a3b-4c70-99be-223a7283599a req-ffea5683-b1b0-458b-a32c-4d4145a127fb service nova] Releasing lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.939690] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.939943] env[68569]: DEBUG nova.network.neutron [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.002417] env[68569]: DEBUG nova.compute.manager [None 
req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6824efd5-427b-420d-83d5-a1d5acd94bf9] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1155.195685] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1155.206567] env[68569]: DEBUG nova.scheduler.client.report [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.222500] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1155.222899] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1155.223064] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1155.223254] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1155.223398] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image pref 0:0:0 
{{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1155.223542] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1155.223753] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1155.223908] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1155.224093] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1155.224256] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1155.224430] env[68569]: DEBUG nova.virt.hardware [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1155.225341] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494c7b95-2437-4bf1-a4e3-de6fd79ebb09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.236501] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e891c8-1304-478c-be85-185c080368e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.271917] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.272264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.275753] env[68569]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209701f3-b800-455c-bcd1-2e4ebdec8331 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.278815] env[68569]: INFO nova.compute.manager [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Took 17.79 seconds to build instance. [ 1155.299471] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2944270c-c6fa-47ff-833b-0ecd250d7efb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.329495] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfiguring VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1155.329811] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e15784a-7812-490e-a814-6ad8e90511f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.351534] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1155.351534] env[68569]: value = "task-3167801" [ 1155.351534] env[68569]: _type = "Task" [ 1155.351534] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.360481] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.472263] env[68569]: DEBUG nova.network.neutron [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1155.506302] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a63b06a1-c24e-4013-a1f4-b227732a1e05] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1155.612261] env[68569]: DEBUG nova.network.neutron [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Updating instance_info_cache with network_info: [{"id": "2e2d14d6-2ae7-45de-a9ee-885c117167ee", "address": "fa:16:3e:f2:c3:9b", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d14d6-2a", "ovs_interfaceid": "2e2d14d6-2ae7-45de-a9ee-885c117167ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.714870] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.738919] env[68569]: INFO nova.scheduler.client.report [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Deleted allocations for instance f5f8b054-7ee4-40f5-84de-1cee02949cd2 [ 1155.781821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d798ca6e-e6e1-424f-abc2-1a492aba3cdd tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.299s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.873862] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.893880] env[68569]: DEBUG nova.compute.manager [req-45e2867a-61a7-4c2f-aca4-b96bba69d714 req-171fea85-58d4-4447-9155-194df689e4bb service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Received event network-vif-plugged-5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1155.895768] env[68569]: DEBUG oslo_concurrency.lockutils [req-45e2867a-61a7-4c2f-aca4-b96bba69d714 req-171fea85-58d4-4447-9155-194df689e4bb service nova] Acquiring lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.896092] env[68569]: DEBUG oslo_concurrency.lockutils [req-45e2867a-61a7-4c2f-aca4-b96bba69d714 req-171fea85-58d4-4447-9155-194df689e4bb service nova] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.896313] env[68569]: DEBUG oslo_concurrency.lockutils [req-45e2867a-61a7-4c2f-aca4-b96bba69d714 req-171fea85-58d4-4447-9155-194df689e4bb service nova] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.896495] env[68569]: DEBUG nova.compute.manager [req-45e2867a-61a7-4c2f-aca4-b96bba69d714 req-171fea85-58d4-4447-9155-194df689e4bb service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] No waiting events found dispatching network-vif-plugged-5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1155.896663] env[68569]: WARNING nova.compute.manager [req-45e2867a-61a7-4c2f-aca4-b96bba69d714 req-171fea85-58d4-4447-9155-194df689e4bb service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Received unexpected event network-vif-plugged-5b205295-7e30-4590-b967-0d6e1f4d416a for instance with vm_state building and task_state spawning. 
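[Editor's note] The req-b54b8080 and req-45e2867a entries show how externally generated Neutron events (network-vif-plugged, network-changed) are dispatched: a per-instance "<uuid>-events" lock is taken, any registered waiter for that event is popped, and a WARNING is logged when nothing is waiting (the "Received unexpected event ... vm_state building" lines). The toy sketch below illustrates that pop-under-lock pattern under those assumptions; the class and method names are illustrative, not Nova's actual implementation.

import threading

class InstanceEvents:
    """Toy model of the per-instance event dispatch pattern in the log:
    pop a registered waiter under a lock, or report the event as unexpected."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING "Received unexpected event ..." records.
        print(f"unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()


if __name__ == "__main__":
    ev = InstanceEvents()
    ev.prepare_for_event("3363dac8", "network-vif-plugged")
    external_instance_event(ev, "3363dac8", "network-vif-plugged")   # dispatched
    external_instance_event(ev, "3363dac8", "network-vif-plugged")   # unexpected
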
[ 1155.990496] env[68569]: DEBUG nova.network.neutron [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Successfully updated port: 5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1156.010347] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 060fc4c8-b173-4fc4-8232-e13e3eac9dc3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1156.115412] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.115798] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Instance network_info: |[{"id": "2e2d14d6-2ae7-45de-a9ee-885c117167ee", "address": "fa:16:3e:f2:c3:9b", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d14d6-2a", "ovs_interfaceid": "2e2d14d6-2ae7-45de-a9ee-885c117167ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1156.116307] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:c3:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e2d14d6-2ae7-45de-a9ee-885c117167ee', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1156.124821] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.125071] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1156.125357] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c910c66-7dc9-4d15-b5ba-0d894087be4a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.147488] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1156.147488] env[68569]: value = "task-3167802" [ 1156.147488] env[68569]: _type = "Task" [ 1156.147488] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.156148] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167802, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.246546] env[68569]: DEBUG oslo_concurrency.lockutils [None req-969ac4e9-f445-49e4-bd5b-e0ffed26e9b7 tempest-ServersTestJSON-1743206981 tempest-ServersTestJSON-1743206981-project-member] Lock "f5f8b054-7ee4-40f5-84de-1cee02949cd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.842s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.363357] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.441468] env[68569]: DEBUG nova.compute.manager [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Received event network-changed-66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1156.442622] env[68569]: DEBUG nova.compute.manager [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Refreshing instance network info cache due to event network-changed-66a22ce3-2444-47fc-aaf8-741e718c05f0. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1156.442622] env[68569]: DEBUG oslo_concurrency.lockutils [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] Acquiring lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.442622] env[68569]: DEBUG oslo_concurrency.lockutils [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] Acquired lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.442622] env[68569]: DEBUG nova.network.neutron [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Refreshing network info cache for port 66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1156.493958] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.494189] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.494324] env[68569]: DEBUG nova.network.neutron [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1156.513332] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 123a6895-af16-493a-afce-7ae6c2137422] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1156.659144] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167802, 'name': CreateVM_Task, 'duration_secs': 0.451522} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.659395] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1156.660257] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.660471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.660876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1156.661195] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-323ac26e-4fdd-48ba-86c3-80ce671d1488 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.666900] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1156.666900] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c5b66d-30da-268e-5d32-31ba8f2ac78e" [ 1156.666900] env[68569]: _type = "Task" [ 1156.666900] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.676898] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c5b66d-30da-268e-5d32-31ba8f2ac78e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.864506] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.018362] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: ab021831-2cc3-4457-aa55-b55036c2a423] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1157.043182] env[68569]: DEBUG nova.network.neutron [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1157.179613] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c5b66d-30da-268e-5d32-31ba8f2ac78e, 'name': SearchDatastore_Task, 'duration_secs': 0.016541} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.180567] env[68569]: DEBUG nova.network.neutron [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updated VIF entry in instance network info cache for port 66a22ce3-2444-47fc-aaf8-741e718c05f0. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.180939] env[68569]: DEBUG nova.network.neutron [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updating instance_info_cache with network_info: [{"id": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "address": "fa:16:3e:69:0a:26", "network": {"id": "0d875ac4-491f-4196-ae0b-dc025452b092", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-166608497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e52d937c83d46daa36746494bd7ccbe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a22ce3-24", "ovs_interfaceid": "66a22ce3-2444-47fc-aaf8-741e718c05f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.182195] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.182444] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1157.182698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.182857] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.183067] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.183555] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f0e4308-415e-4e41-bcda-f454d8619daa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.193343] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.193491] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1157.194932] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-094efc58-730e-48f6-8aa7-1a303094a641 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.201209] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1157.201209] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52444100-29aa-b8cc-1fe0-b9d9e9874973" [ 1157.201209] env[68569]: _type = "Task" [ 1157.201209] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.213326] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52444100-29aa-b8cc-1fe0-b9d9e9874973, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.263501] env[68569]: DEBUG nova.network.neutron [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating instance_info_cache with network_info: [{"id": "5b205295-7e30-4590-b967-0d6e1f4d416a", "address": "fa:16:3e:05:b6:6a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b205295-7e", "ovs_interfaceid": "5b205295-7e30-4590-b967-0d6e1f4d416a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.363808] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.523777] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: b770fbd1-579a-4e3e-a5c9-9f030695f057] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1157.685137] env[68569]: DEBUG oslo_concurrency.lockutils [req-96aca42f-d3b2-4aee-a85e-8810294ca586 req-7cc68e1a-5ff6-4523-8779-1b6aa2960bf0 service nova] Releasing lock "refresh_cache-09f4018b-f1cd-4726-b871-b110a7cf1b43" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.714340] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52444100-29aa-b8cc-1fe0-b9d9e9874973, 'name': SearchDatastore_Task, 'duration_secs': 0.017265} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.715233] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e452387b-317e-48f3-b1ca-a9df112cdf1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.720870] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1157.720870] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b9a832-51e1-5212-193f-54fc85dcea52" [ 1157.720870] env[68569]: _type = "Task" [ 1157.720870] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.728525] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b9a832-51e1-5212-193f-54fc85dcea52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.766472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.766935] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Instance network_info: |[{"id": "5b205295-7e30-4590-b967-0d6e1f4d416a", "address": "fa:16:3e:05:b6:6a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b205295-7e", "ovs_interfaceid": "5b205295-7e30-4590-b967-0d6e1f4d416a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1157.767298] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:b6:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b205295-7e30-4590-b967-0d6e1f4d416a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1157.775175] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.775399] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1157.775623] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0e54ab4-cbef-4e57-83cd-4c1356fc4793 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.797258] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1157.797258] env[68569]: value = "task-3167803" [ 1157.797258] env[68569]: _type = "Task" [ 1157.797258] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.863769] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.932723] env[68569]: DEBUG nova.compute.manager [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Received event network-changed-5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1157.932723] env[68569]: DEBUG nova.compute.manager [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Refreshing instance network info cache due to event network-changed-5b205295-7e30-4590-b967-0d6e1f4d416a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1157.932723] env[68569]: DEBUG oslo_concurrency.lockutils [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] Acquiring lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.933153] env[68569]: DEBUG oslo_concurrency.lockutils [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] Acquired lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.933153] env[68569]: DEBUG nova.network.neutron [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Refreshing network info cache for port 5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.029085] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 53cc8dbd-c163-403a-9286-e1f8ad939f94] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1158.205759] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b40803-491c-16e0-8a9b-674cb21b8ccb/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1158.206917] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aaf876-27d8-4cc6-8c62-631e712cf6c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.213796] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b40803-491c-16e0-8a9b-674cb21b8ccb/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1158.213969] env[68569]: ERROR oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b40803-491c-16e0-8a9b-674cb21b8ccb/disk-0.vmdk due to incomplete transfer. [ 1158.214279] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-340d7ec4-885a-4bab-89a3-7ac83065a0e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.225944] env[68569]: DEBUG oslo_vmware.rw_handles [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b40803-491c-16e0-8a9b-674cb21b8ccb/disk-0.vmdk. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1158.226184] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Uploaded image f6c1f108-99b5-4bc9-80cd-889b31900043 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1158.228582] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1158.229239] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-aba506ae-bc7d-40a1-a2b8-08cad48e850b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.234340] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b9a832-51e1-5212-193f-54fc85dcea52, 'name': SearchDatastore_Task, 'duration_secs': 0.029222} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.235049] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.235340] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 3363dac8-c5df-405e-8bdc-9002e2d45e05/3363dac8-c5df-405e-8bdc-9002e2d45e05.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.235591] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfe75703-43b1-4122-955a-6f249928371b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.239019] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1158.239019] env[68569]: value = "task-3167804" [ 1158.239019] env[68569]: _type = "Task" [ 1158.239019] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.243492] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1158.243492] env[68569]: value = "task-3167805" [ 1158.243492] env[68569]: _type = "Task" [ 1158.243492] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.251378] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167804, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.256529] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167805, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.307774] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167803, 'name': CreateVM_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.364290] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.534992] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fd803a5e-8dbd-449e-b45d-1e6410a286e8] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1158.730334] env[68569]: DEBUG nova.network.neutron [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updated VIF entry in instance network info cache for port 5b205295-7e30-4590-b967-0d6e1f4d416a. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1158.730712] env[68569]: DEBUG nova.network.neutron [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating instance_info_cache with network_info: [{"id": "5b205295-7e30-4590-b967-0d6e1f4d416a", "address": "fa:16:3e:05:b6:6a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b205295-7e", "ovs_interfaceid": "5b205295-7e30-4590-b967-0d6e1f4d416a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.755284] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167804, 'name': Destroy_Task, 'duration_secs': 0.451811} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.758779] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Destroyed the VM [ 1158.759129] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1158.759434] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167805, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.759955] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-20175599-1531-416d-91b1-c1a86c48b3c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.766953] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1158.766953] env[68569]: value = "task-3167806" [ 1158.766953] env[68569]: _type = "Task" [ 1158.766953] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.775807] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167806, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.807945] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167803, 'name': CreateVM_Task, 'duration_secs': 0.61152} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.808170] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1158.808871] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.809073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.809407] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1158.809674] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-402a016d-6e54-4043-b1a1-702bd59e67cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.815265] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1158.815265] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a11c9f-4b80-04b0-9694-5e845f188e04" [ 
1158.815265] env[68569]: _type = "Task" [ 1158.815265] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.823831] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a11c9f-4b80-04b0-9694-5e845f188e04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.864245] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.039565] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: b40c9dec-cebc-4d23-8df4-96e804333706] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1159.233532] env[68569]: DEBUG oslo_concurrency.lockutils [req-e95ccb7b-0b75-48bf-ac3a-7bc6143c6ed1 req-6dce3716-cd56-4081-b0a5-4ab05d6c3250 service nova] Releasing lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.255062] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167805, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65238} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.255312] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 3363dac8-c5df-405e-8bdc-9002e2d45e05/3363dac8-c5df-405e-8bdc-9002e2d45e05.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.255521] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.255770] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-feb4bc56-8c87-47cf-a508-52952f8e3549 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.262322] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1159.262322] env[68569]: value = "task-3167807" [ 1159.262322] env[68569]: _type = "Task" [ 1159.262322] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.272188] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167807, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.277040] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167806, 'name': RemoveSnapshot_Task, 'duration_secs': 0.384071} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.277279] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1159.277542] env[68569]: DEBUG nova.compute.manager [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1159.278407] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7b0071-8c3c-4159-b289-f69ec8944dc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.325387] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a11c9f-4b80-04b0-9694-5e845f188e04, 'name': SearchDatastore_Task, 'duration_secs': 0.046728} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.325598] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.325839] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1159.326075] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.326224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.326429] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1159.326697] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7365f4da-756e-4900-8a81-512fe7fef01e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.341621] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1159.341798] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1159.342738] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb2f617c-a7c3-4cbd-aaf4-3f81138e7164 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.348070] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1159.348070] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ae5831-c743-1178-fda2-847faa3460ae" [ 1159.348070] env[68569]: _type = "Task" [ 1159.348070] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.355120] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ae5831-c743-1178-fda2-847faa3460ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.362338] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.543205] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 398dd3c7-c630-4a29-b204-80f6fb394ce8] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1159.772646] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070054} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.772936] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1159.773764] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b35a69-b511-4b1f-9de1-4a029b829528 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.798172] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 3363dac8-c5df-405e-8bdc-9002e2d45e05/3363dac8-c5df-405e-8bdc-9002e2d45e05.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.798783] env[68569]: INFO nova.compute.manager [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Shelve offloading [ 1159.800314] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba7f117b-b216-470e-b2d0-76c521ec50fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.821924] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1159.821924] env[68569]: value = "task-3167808" [ 1159.821924] env[68569]: _type = "Task" [ 1159.821924] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.831611] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167808, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.858508] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ae5831-c743-1178-fda2-847faa3460ae, 'name': SearchDatastore_Task, 'duration_secs': 0.075778} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.862639] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-414a1ed7-d43e-4300-b97e-145ec6250332 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.870695] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.872154] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1159.872154] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ab395-e4ad-e14c-bd7e-151d4bda1e81" [ 1159.872154] env[68569]: _type = "Task" [ 1159.872154] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.880586] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ab395-e4ad-e14c-bd7e-151d4bda1e81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.047347] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: de2b0206-0c73-4275-89ff-37199520dd71] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1160.317739] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1160.318115] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94f53750-30c8-489f-b508-e6bf2abcaea8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.327209] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1160.327209] env[68569]: value = "task-3167809" [ 1160.327209] env[68569]: _type = "Task" [ 1160.327209] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.334256] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.340109] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1160.340335] env[68569]: DEBUG nova.compute.manager [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1160.341121] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4c71a0-57d2-4f58-9029-26d11fcab19c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.346866] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.347044] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.347227] env[68569]: DEBUG nova.network.neutron [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1160.368089] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.380736] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ab395-e4ad-e14c-bd7e-151d4bda1e81, 'name': SearchDatastore_Task, 'duration_secs': 0.061333} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.380997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.381278] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d/47fa6c6a-a31f-4eea-86b0-807dba6a6b4d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1160.381529] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0865918e-a29b-464b-8adf-314f9e50606f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.387310] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1160.387310] env[68569]: value = "task-3167810" [ 1160.387310] env[68569]: _type = "Task" [ 1160.387310] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.394920] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.551712] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fb97d2dd-d42a-42e8-9a36-5c913a58b891] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1160.832883] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167808, 'name': ReconfigVM_Task, 'duration_secs': 0.557359} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.833200] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 3363dac8-c5df-405e-8bdc-9002e2d45e05/3363dac8-c5df-405e-8bdc-9002e2d45e05.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1160.833890] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d64ddac4-2ee0-4ab4-8c85-3d6ca2ea0766 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.841064] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1160.841064] env[68569]: value = "task-3167811" [ 1160.841064] env[68569]: _type = "Task" [ 1160.841064] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.853778] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167811, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.868882] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.898768] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468385} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.901449] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d/47fa6c6a-a31f-4eea-86b0-807dba6a6b4d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1160.901681] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1160.901949] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c54c79c8-e7d6-4968-9fc9-8b91b8d904dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.907969] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1160.907969] env[68569]: value = "task-3167812" [ 1160.907969] env[68569]: _type = "Task" [ 1160.907969] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.916385] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167812, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.056218] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.056442] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Cleaning up deleted instances with incomplete migration {{(pid=68569) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11879}} [ 1161.103010] env[68569]: DEBUG nova.network.neutron [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.350933] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167811, 'name': Rename_Task, 'duration_secs': 0.139684} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.351239] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1161.351484] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2bb6efe-44ce-42aa-bdd5-f39f191d1e49 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.357624] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1161.357624] env[68569]: value = "task-3167813" [ 1161.357624] env[68569]: _type = "Task" [ 1161.357624] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.367345] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167813, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.370062] env[68569]: DEBUG oslo_vmware.api [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167801, 'name': ReconfigVM_Task, 'duration_secs': 5.756574} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.370287] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.370493] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Reconfigured VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1161.417161] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064446} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.417424] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1161.418199] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b8dfc5-3bda-4ec5-b31c-9b969ef8be3e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.439064] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d/47fa6c6a-a31f-4eea-86b0-807dba6a6b4d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1161.439332] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d22b654-44d7-4c9c-a5dd-ffdaad530eca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.459152] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1161.459152] env[68569]: value = "task-3167814" [ 1161.459152] env[68569]: _type = "Task" [ 1161.459152] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.466548] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167814, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.606386] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.618161] env[68569]: DEBUG nova.compute.manager [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-vif-deleted-b197187f-ff62-4584-bbfa-1eacf3b6d13a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1161.618369] env[68569]: INFO nova.compute.manager [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Neutron deleted interface b197187f-ff62-4584-bbfa-1eacf3b6d13a; detaching it from the instance and deleting it from the info cache [ 1161.618652] env[68569]: DEBUG nova.network.neutron [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "address": "fa:16:3e:5e:77:17", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0c5e5b-c2", "ovs_interfaceid": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.868528] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.937661] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.938741] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcbb744-2366-4bd5-a26f-9611d8ea0c2a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.946397] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1161.946778] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef48189a-1307-4bb8-84e8-3f8900853af7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.970876] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167814, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.012182] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.012182] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.012324] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleting the datastore file [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.012610] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85081b7b-d85b-413f-bfc4-d4a26a4d6c11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.018835] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1162.018835] env[68569]: value = "task-3167816" [ 1162.018835] env[68569]: _type = "Task" [ 1162.018835] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.027737] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167816, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.120862] env[68569]: DEBUG oslo_concurrency.lockutils [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.121127] env[68569]: DEBUG oslo_concurrency.lockutils [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] Acquired lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.122089] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843429b5-b6ec-42d1-badd-bace288d4245 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.140015] env[68569]: DEBUG oslo_concurrency.lockutils [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] Releasing lock "ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.140373] env[68569]: WARNING nova.compute.manager [req-48bdd050-952c-47b1-baf9-0766c747b775 req-a53f5ecb-d05f-446b-a924-1852c919e67f service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Detach interface failed, port_id=b197187f-ff62-4584-bbfa-1eacf3b6d13a, reason: No device with interface-id b197187f-ff62-4584-bbfa-1eacf3b6d13a exists on VM: nova.exception.NotFound: No device with interface-id b197187f-ff62-4584-bbfa-1eacf3b6d13a exists on VM [ 1162.368023] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.469513] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167814, 'name': ReconfigVM_Task, 'duration_secs': 0.598972} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.469746] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d/47fa6c6a-a31f-4eea-86b0-807dba6a6b4d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1162.470385] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1129d145-7117-449d-b757-b7cca097d581 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.476694] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1162.476694] env[68569]: value = "task-3167817" [ 1162.476694] env[68569]: _type = "Task" [ 1162.476694] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.484630] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167817, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.529977] env[68569]: DEBUG oslo_vmware.api [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275994} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.530304] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.530527] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.530741] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.554123] env[68569]: INFO nova.scheduler.client.report [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted allocations for instance 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a [ 1162.595310] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.595514] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.595779] env[68569]: DEBUG nova.network.neutron [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.762740] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.763176] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "ee188712-b0e0-44ee-80b4-be72da32299f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.763176] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.763366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.763536] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "ee188712-b0e0-44ee-80b4-be72da32299f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.766055] env[68569]: INFO nova.compute.manager [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Terminating instance [ 1162.868720] env[68569]: DEBUG oslo_vmware.api [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167813, 'name': PowerOnVM_Task, 'duration_secs': 1.457579} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.868981] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1162.869197] env[68569]: INFO nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Took 9.99 seconds to spawn the instance on the hypervisor. 
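The records above show the compute manager taking a lock named after the instance UUID ("ee188712-b0e0-44ee-80b4-be72da32299f") and a companion "<uuid>-events" lock before terminating the instance; lock-holder names such as "ComputeManager.terminate_instance..do_terminate_instance" read like Python qualified names with the "<locals>" segment dropped, i.e. an inner function wrapped by a synchronized lock. A minimal sketch of that pattern with oslo.concurrency follows; the function names and bodies are illustrative, not Nova's actual code.

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid, destroy_fn, clear_events_fn):
    """Illustrative sketch: serialize termination per instance, clearing
    pending external events under the companion '<uuid>-events' lock."""

    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # lockutils emits the "acquired by" / "released by" debug messages
        # seen above, using this inner function's qualified name.

        @lockutils.synchronized(instance_uuid + '-events')
        def _clear_events():
            clear_events_fn(instance_uuid)

        _clear_events()
        destroy_fn(instance_uuid)

    do_terminate_instance()

Calling terminate_instance(uuid, driver_destroy, clear_events) reproduces the acquire/release ordering logged between 1162.762740 and 1162.766055: the per-instance lock is taken first, the "-events" lock is held briefly inside it, and only then does the actual teardown begin.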
[ 1162.869381] env[68569]: DEBUG nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1162.870257] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585753ed-73fa-4c2f-853d-ac670636f572 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.986767] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167817, 'name': Rename_Task, 'duration_secs': 0.159732} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.987070] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1162.987318] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9b5483b-3a8b-4b8d-9497-3b74fd8c80d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.993204] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1162.993204] env[68569]: value = "task-3167818" [ 1162.993204] env[68569]: _type = "Task" [ 1162.993204] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.001262] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167818, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.058578] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.058668] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.058873] env[68569]: DEBUG nova.objects.instance [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'resources' on Instance uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.272170] env[68569]: DEBUG nova.compute.manager [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1163.272170] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.272170] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf63968-ab69-4e78-bec7-c6d24449e99b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.280017] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.280017] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-035a1eff-c0b6-4411-a778-546e927a9bee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.288557] env[68569]: DEBUG oslo_vmware.api [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1163.288557] env[68569]: value = "task-3167819" [ 1163.288557] env[68569]: _type = "Task" [ 1163.288557] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.296272] env[68569]: DEBUG oslo_vmware.api [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167819, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.316873] env[68569]: DEBUG nova.compute.manager [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Received event network-changed-2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1163.317094] env[68569]: DEBUG nova.compute.manager [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Refreshing instance network info cache due to event network-changed-2e2d14d6-2ae7-45de-a9ee-885c117167ee. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1163.317390] env[68569]: DEBUG oslo_concurrency.lockutils [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] Acquiring lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.317533] env[68569]: DEBUG oslo_concurrency.lockutils [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] Acquired lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.317750] env[68569]: DEBUG nova.network.neutron [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Refreshing network info cache for port 2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1163.390765] env[68569]: INFO nova.compute.manager [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Took 16.30 seconds to build instance. [ 1163.392015] env[68569]: INFO nova.network.neutron [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Port 1c0c5e5b-c2d8-4845-8b34-e2356a2452df from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
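The last record above notes that port 1c0c5e5b-c2d8-4845-8b34-e2356a2452df is no longer associated with the instance in Neutron and is being removed from the info cache; the "Updating instance_info_cache with network_info: [...]" entries earlier and later in this log show that the cache is a list of VIF dicts keyed by "id". A minimal sketch of that pruning step, assuming only the structure visible in those records (the helper name is an illustration):

def prune_port_from_cache(network_info, port_id):
    """Return the cached VIF list without the entry for port_id."""
    return [vif for vif in network_info if vif.get('id') != port_id]


cached = [
    {"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9"},
    {"id": "1c0c5e5b-c2d8-4845-8b34-e2356a2452df", "address": "fa:16:3e:5e:77:17"},
]
print(prune_port_from_cache(cached, "1c0c5e5b-c2d8-4845-8b34-e2356a2452df"))
# Only the b8cb9bd3-... entry remains, matching the cache update at 1163.392431.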
[ 1163.392431] env[68569]: DEBUG nova.network.neutron [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [{"id": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "address": "fa:16:3e:ec:47:b9", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8cb9bd3-0c", "ovs_interfaceid": "b8cb9bd3-0cdd-4183-b5f7-e26fe241764b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.505414] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167818, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.561760] env[68569]: DEBUG nova.objects.instance [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'numa_topology' on Instance uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.648115] env[68569]: DEBUG nova.compute.manager [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-vif-unplugged-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1163.648388] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.648388] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.648657] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.648886] env[68569]: DEBUG nova.compute.manager [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] No waiting events found dispatching network-vif-unplugged-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1163.649156] env[68569]: WARNING nova.compute.manager [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received unexpected event network-vif-unplugged-8b05f57b-2ff2-49af-8333-0047f5230208 for instance with vm_state shelved_offloaded and task_state unshelving. [ 1163.649357] env[68569]: DEBUG nova.compute.manager [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1163.649548] env[68569]: DEBUG nova.compute.manager [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing instance network info cache due to event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1163.649770] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.649947] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.650152] env[68569]: DEBUG nova.network.neutron [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1163.797661] env[68569]: DEBUG oslo_vmware.api [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167819, 'name': PowerOffVM_Task, 'duration_secs': 0.24933} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.798098] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.798155] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.798406] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ef5083c-d8bb-4363-86b2-07491d265b06 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.892073] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d3e297ad-4f9a-461c-a66d-6b75ed89f67a tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.808s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.894768] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-ee188712-b0e0-44ee-80b4-be72da32299f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.929213] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 
tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.929299] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.929481] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleting the datastore file [datastore1] ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.929777] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50b82500-5739-4659-9b3a-bc6db365f460 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.936407] env[68569]: DEBUG oslo_vmware.api [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1163.936407] env[68569]: value = "task-3167821" [ 1163.936407] env[68569]: _type = "Task" [ 1163.936407] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.944420] env[68569]: DEBUG oslo_vmware.api [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167821, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.004256] env[68569]: DEBUG oslo_vmware.api [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167818, 'name': PowerOnVM_Task, 'duration_secs': 0.602023} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.004527] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1164.004726] env[68569]: INFO nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Took 8.81 seconds to spawn the instance on the hypervisor. 
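The records above walk through _destroy_instance for ee188712-b0e0-44ee-80b4-be72da32299f: PowerOffVM_Task, UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance directory, each task followed by wait_for_task polling. Below is a hedged sketch of that sequence against the oslo.vmware session API; session, vm_ref, dc_ref and ds_path are assumptions for illustration, and this simplifies what nova.virt.vmwareapi.vmops and ds_util actually do (for example, the real code also tolerates the "VM already powered off" case seen earlier in this log).

def destroy_vm(session, vm_ref, dc_ref, ds_path):
    """Sketch: power off, unregister, then delete the instance directory.

    `session` is assumed to be an oslo_vmware.api.VMwareAPISession.
    """
    vim = session.vim

    # PowerOffVM_Task, then poll it (the "progress is N%" records above).
    power_off_task = session.invoke_api(vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(power_off_task)

    # UnregisterVM is not a task; it returns once the VM is unregistered.
    session.invoke_api(vim, 'UnregisterVM', vm_ref)

    # Delete the datastore directory, e.g.
    # '[datastore1] ee188712-b0e0-44ee-80b4-be72da32299f'.
    delete_task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task',
        vim.service_content.fileManager,
        name=ds_path, datacenter=dc_ref)
    session.wait_for_task(delete_task)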
[ 1164.004938] env[68569]: DEBUG nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.005689] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c204e35-d46a-4b5d-b2a5-8a7ddc032ea3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.014578] env[68569]: DEBUG nova.network.neutron [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Updated VIF entry in instance network info cache for port 2e2d14d6-2ae7-45de-a9ee-885c117167ee. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1164.014885] env[68569]: DEBUG nova.network.neutron [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Updating instance_info_cache with network_info: [{"id": "2e2d14d6-2ae7-45de-a9ee-885c117167ee", "address": "fa:16:3e:f2:c3:9b", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d14d6-2a", "ovs_interfaceid": "2e2d14d6-2ae7-45de-a9ee-885c117167ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.064555] env[68569]: DEBUG nova.objects.base [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Object Instance<6dbe8a18-c317-4b36-bd6f-922ce9f85b6a> lazy-loaded attributes: resources,numa_topology {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1164.190314] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b2dbc1-71bf-485d-a4d5-e4cba25b5d4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.198394] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17c1816-aa25-401f-8cfe-333c2425bcca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.230704] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-83dc6e70-790b-49ff-b141-aba79acad5ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.238614] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20536f5a-5edd-432b-84b9-c1473dcc6e9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.254598] env[68569]: DEBUG nova.compute.provider_tree [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.401118] env[68569]: DEBUG oslo_concurrency.lockutils [None req-442bb565-d66b-4828-b1ea-e88ca5046dde tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-ee188712-b0e0-44ee-80b4-be72da32299f-b197187f-ff62-4584-bbfa-1eacf3b6d13a" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.636s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.413736] env[68569]: DEBUG nova.network.neutron [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updated VIF entry in instance network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1164.414246] env[68569]: DEBUG nova.network.neutron [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.445807] env[68569]: DEBUG oslo_vmware.api [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366105} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.446107] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.446278] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.446454] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.446618] env[68569]: INFO nova.compute.manager [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1164.446866] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.447066] env[68569]: DEBUG nova.compute.manager [-] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1164.447125] env[68569]: DEBUG nova.network.neutron [-] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1164.495462] env[68569]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 1c0c5e5b-c2d8-4845-8b34-e2356a2452df could not be found.", "detail": ""}} {{(pid=68569) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1164.495462] env[68569]: DEBUG nova.network.neutron [-] Unable to show port 1c0c5e5b-c2d8-4845-8b34-e2356a2452df as it no longer exists. {{(pid=68569) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1164.522487] env[68569]: DEBUG oslo_concurrency.lockutils [req-f889f7cb-6f67-4af6-a6a9-ea6f3f66a90f req-db01719f-5742-41f4-b36f-bd0ad78a0033 service nova] Releasing lock "refresh_cache-3363dac8-c5df-405e-8bdc-9002e2d45e05" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.527075] env[68569]: INFO nova.compute.manager [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Took 14.70 seconds to build instance. 
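The DeleteDatastoreFile_Task entry above follows oslo.vmware's generic task flow: the vSphere SOAP call returns a Task managed object immediately, and the API session then polls it (the _poll_task lines) until vCenter reports success. A minimal sketch of that flow, assuming an already established oslo_vmware.api.VMwareAPISession ("session") and a Datacenter managed-object reference ("dc_ref"); the helper name and its arguments are illustrative only, not Nova's ds_util code.

def delete_datastore_file(session, ds_path, dc_ref):
    """Illustrative only: delete a datastore file and wait for the task.

    'session' is an oslo_vmware.api.VMwareAPISession, 'ds_path' a
    "[datastore] path/file" string, 'dc_ref' a Datacenter moref.
    """
    # The FileManager managed object lives on the retrieved ServiceContent.
    file_mgr = session.vim.service_content.fileManager
    # The SOAP call returns a Task managed object right away ...
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_mgr, name=str(ds_path),
                              datacenter=dc_ref)
    # ... and wait_for_task() polls it (the "progress is N%" lines above)
    # until it succeeds, raising an oslo.vmware exception if the task fails.
    return session.wait_for_task(task)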
[ 1164.545166] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.758328] env[68569]: DEBUG nova.scheduler.client.report [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1164.917282] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.917738] env[68569]: DEBUG nova.compute.manager [req-b8265aa6-e1b7-47ed-bd23-0a09cc93f9ca req-449c6436-8c31-4b4f-93e5-4fa91692b3f7 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-vif-deleted-1c0c5e5b-c2d8-4845-8b34-e2356a2452df {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1165.029313] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c006b5ea-cb75-4258-b4d5-7a4541996da8 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.218s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.264141] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.347839] env[68569]: DEBUG nova.compute.manager [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Received event network-changed-5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1165.347839] env[68569]: DEBUG nova.compute.manager [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Refreshing instance network info cache due to event network-changed-5b205295-7e30-4590-b967-0d6e1f4d416a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1165.347839] env[68569]: DEBUG oslo_concurrency.lockutils [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] Acquiring lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.347839] env[68569]: DEBUG oslo_concurrency.lockutils [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] Acquired lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.347839] env[68569]: DEBUG nova.network.neutron [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Refreshing network info cache for port 5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1165.389117] env[68569]: DEBUG nova.network.neutron [-] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.554597] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.679338] env[68569]: DEBUG nova.compute.manager [req-d0cf99c3-658c-4d92-93b2-12004db7e22d req-20ae24a5-dccb-4b5a-86ef-b8a6e2c5a888 service nova] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Received event network-vif-deleted-b8cb9bd3-0cdd-4183-b5f7-e26fe241764b {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1165.773816] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2e6e7c87-fab2-4db2-b412-89d633645187 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.060s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.774090] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.229s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.774123] env[68569]: INFO nova.compute.manager [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Unshelving [ 1165.892498] env[68569]: INFO nova.compute.manager [-] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Took 1.45 seconds to deallocate network for instance. 
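The lock bookkeeping in the shelve/unshelve handoff above ("acquired ... waited 1.229s", "released ... held 21.060s") is produced by oslo.concurrency's lockutils helpers, which serialize callers on a named lock and log wait and hold times at DEBUG. A minimal sketch of the two usual forms, with placeholder lock names and function bodies rather than Nova's actual compute-manager code:

from oslo_concurrency import lockutils

# Decorator form: concurrent callers of the wrapped function are serialized
# on the named lock; the wrapper emits the "Acquiring lock ... / acquired ...
# waited Ns / released ... held Ns" DEBUG lines seen throughout this log.
@lockutils.synchronized('compute_resources')
def update_usage_sketch():
    pass  # placeholder: bookkeeping would run while the lock is held

# Context-manager form, typical for short-lived per-instance cache locks
# such as "refresh_cache-<instance uuid>".
def refresh_cache_sketch(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder: rebuild the instance network info cache here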
[ 1166.051129] env[68569]: DEBUG nova.network.neutron [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updated VIF entry in instance network info cache for port 5b205295-7e30-4590-b967-0d6e1f4d416a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1166.051515] env[68569]: DEBUG nova.network.neutron [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating instance_info_cache with network_info: [{"id": "5b205295-7e30-4590-b967-0d6e1f4d416a", "address": "fa:16:3e:05:b6:6a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b205295-7e", "ovs_interfaceid": "5b205295-7e30-4590-b967-0d6e1f4d416a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.399329] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.399625] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.399846] env[68569]: DEBUG nova.objects.instance [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'resources' on Instance uuid ee188712-b0e0-44ee-80b4-be72da32299f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.554530] env[68569]: DEBUG oslo_concurrency.lockutils [req-e8736dc4-9cb1-4f92-9349-28d4f2378e97 req-148e9348-27a6-4318-a8bc-17c7c6af2c41 service nova] Releasing lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.783661] env[68569]: DEBUG 
nova.compute.utils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1167.053379] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944ab55d-c081-4fb2-b3ab-1f2d866caa0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.065518] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50a51d7-8ddb-4eb8-a5ac-d0eee6cdaae3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.097347] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d04c30-c5da-450d-9164-fa5577d12196 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.104532] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019c2278-fe45-4876-906f-95a6415a1134 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.119342] env[68569]: DEBUG nova.compute.provider_tree [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.287137] env[68569]: INFO nova.virt.block_device [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Booting with volume 60cd4f2b-f681-462c-959e-39f6ce745f96 at /dev/sdb [ 1167.321383] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e71f741b-51bf-49c2-8ea1-2424bb886469 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.331216] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad75b0b-500d-4b8b-8981-496195c26f8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.360932] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-129cf4c7-3ceb-4067-bea0-8a77cb43a37f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.368421] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9775fc94-5c86-4da1-884f-1733d14e88e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.396151] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9cf305-1e75-40ac-a383-3f15a129012f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.402338] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-94808c9c-ae7c-4bdf-ab24-e864902da44f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.414874] env[68569]: DEBUG nova.virt.block_device [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating existing volume attachment record: 9c22e876-7323-4a4e-8eed-0e7c77009203 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1167.623070] env[68569]: DEBUG nova.scheduler.client.report [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.128572] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.147060] env[68569]: INFO nova.scheduler.client.report [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted allocations for instance ee188712-b0e0-44ee-80b4-be72da32299f [ 1168.654879] env[68569]: DEBUG oslo_concurrency.lockutils [None req-342fe5b8-4c26-4944-9b94-2a196832fcf6 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "ee188712-b0e0-44ee-80b4-be72da32299f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.892s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.023862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.023862] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.526400] env[68569]: DEBUG nova.compute.manager [None 
req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1172.051803] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.052193] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.053732] env[68569]: INFO nova.compute.claims [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.011475] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.160457] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8f84b7-0966-41a9-8470-b05a0e71d0fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.168068] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fe9645-cf8e-4a9b-8b33-a876750c20da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.197552] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3e6157-33bb-4397-8560-ddf6671c4a31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.204645] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31efdca-cd6d-4799-b526-dc3ec17b1bb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.217559] env[68569]: DEBUG nova.compute.provider_tree [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.720652] env[68569]: DEBUG nova.scheduler.client.report [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 
tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.227065] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.227617] env[68569]: DEBUG nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1174.230977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.220s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.231204] env[68569]: DEBUG nova.objects.instance [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'pci_requests' on Instance uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.732875] env[68569]: DEBUG nova.compute.utils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1174.734312] env[68569]: DEBUG nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1174.734479] env[68569]: DEBUG nova.network.neutron [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1174.737883] env[68569]: DEBUG nova.objects.instance [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'numa_topology' on Instance uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.780494] env[68569]: DEBUG nova.policy [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1175.071311] env[68569]: DEBUG nova.network.neutron [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Successfully created port: 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1175.237525] env[68569]: DEBUG nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1175.240911] env[68569]: INFO nova.compute.claims [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1176.250685] env[68569]: DEBUG nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1176.277903] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1176.278192] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.278352] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1176.278532] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.278674] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1176.278824] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1176.279046] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1176.279210] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1176.279376] env[68569]: DEBUG 
nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1176.279535] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1176.279704] env[68569]: DEBUG nova.virt.hardware [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1176.280663] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b284f83-3111-4ad7-b2aa-fa65e778a6e2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.290644] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754801e7-763e-4001-ba32-2627e5c0f9f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.376056] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef7738b-eadf-4129-97c6-49df01d54d50 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.383465] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8ce86a-53b2-4dfd-8926-2684cfc4d432 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.413976] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afeb01b4-d9ed-481a-925e-426aade2aa11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.419101] env[68569]: DEBUG nova.compute.manager [req-b798ea80-0a9b-4706-855e-053d357dd315 req-dad88c79-1219-4987-9b63-6faffe9f1dbf service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-vif-plugged-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1176.419270] env[68569]: DEBUG oslo_concurrency.lockutils [req-b798ea80-0a9b-4706-855e-053d357dd315 req-dad88c79-1219-4987-9b63-6faffe9f1dbf service nova] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.419476] env[68569]: DEBUG oslo_concurrency.lockutils [req-b798ea80-0a9b-4706-855e-053d357dd315 req-dad88c79-1219-4987-9b63-6faffe9f1dbf service nova] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.419640] 
env[68569]: DEBUG oslo_concurrency.lockutils [req-b798ea80-0a9b-4706-855e-053d357dd315 req-dad88c79-1219-4987-9b63-6faffe9f1dbf service nova] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.419804] env[68569]: DEBUG nova.compute.manager [req-b798ea80-0a9b-4706-855e-053d357dd315 req-dad88c79-1219-4987-9b63-6faffe9f1dbf service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] No waiting events found dispatching network-vif-plugged-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1176.419971] env[68569]: WARNING nova.compute.manager [req-b798ea80-0a9b-4706-855e-053d357dd315 req-dad88c79-1219-4987-9b63-6faffe9f1dbf service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received unexpected event network-vif-plugged-1ca2f3e3-cee0-4e29-8728-97455622c4be for instance with vm_state building and task_state spawning. [ 1176.423511] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3a3ba0-0e5a-4857-9581-576dccd15ac8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.436889] env[68569]: DEBUG nova.compute.provider_tree [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.500034] env[68569]: DEBUG nova.network.neutron [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Successfully updated port: 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1176.939931] env[68569]: DEBUG nova.scheduler.client.report [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.002806] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.002960] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock 
"refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.003136] env[68569]: DEBUG nova.network.neutron [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1177.445430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.213s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.496065] env[68569]: INFO nova.network.neutron [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating port 8b05f57b-2ff2-49af-8333-0047f5230208 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1177.534856] env[68569]: DEBUG nova.network.neutron [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1177.665758] env[68569]: DEBUG nova.network.neutron [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.168673] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock 
"refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.169425] env[68569]: DEBUG nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Instance network_info: |[{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1178.169524] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:8f:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ca2f3e3-cee0-4e29-8728-97455622c4be', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1178.177890] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1178.178526] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1178.178746] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-029a6f9c-16a7-4511-9366-0c8644175344 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.198830] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1178.198830] env[68569]: value = "task-3167833" [ 1178.198830] env[68569]: _type = "Task" [ 1178.198830] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.206544] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167833, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.445859] env[68569]: DEBUG nova.compute.manager [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1178.446131] env[68569]: DEBUG nova.compute.manager [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing instance network info cache due to event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1178.446283] env[68569]: DEBUG oslo_concurrency.lockutils [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.446427] env[68569]: DEBUG oslo_concurrency.lockutils [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.446586] env[68569]: DEBUG nova.network.neutron [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1178.708170] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167833, 'name': CreateVM_Task, 'duration_secs': 0.388901} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.708373] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1178.708988] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.709171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.709483] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1178.709729] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-318c2721-0312-4a9d-8fee-9de0a49ce1d8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.714366] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1178.714366] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282bdbb-2a5e-702a-075c-e8fb7df5df22" [ 1178.714366] env[68569]: _type = "Task" [ 1178.714366] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.721671] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282bdbb-2a5e-702a-075c-e8fb7df5df22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.936120] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.936496] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.936653] env[68569]: DEBUG nova.network.neutron [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.147761] env[68569]: DEBUG nova.network.neutron [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updated VIF entry in instance network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1179.148237] env[68569]: DEBUG nova.network.neutron [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.227018] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5282bdbb-2a5e-702a-075c-e8fb7df5df22, 'name': SearchDatastore_Task, 'duration_secs': 0.009481} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.227018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.227018] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1179.227018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.227018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.227018] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.227018] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-788db8ca-3136-45d9-b74a-5d78d9e74d7b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.234449] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.234752] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1179.235515] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a490bf67-522e-40f8-b89d-99759fbdb886 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.240549] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1179.240549] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b1426-7f29-2b2a-75f5-ed1bd2076e75" [ 1179.240549] env[68569]: _type = "Task" [ 1179.240549] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.247947] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b1426-7f29-2b2a-75f5-ed1bd2076e75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.643275] env[68569]: DEBUG nova.network.neutron [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.651417] env[68569]: DEBUG oslo_concurrency.lockutils [req-c6be050a-39f2-47ad-83f7-2fb7204efa86 req-2ad98247-08d6-421d-a75e-cc9bebc627d2 service nova] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.751039] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': 
session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b1426-7f29-2b2a-75f5-ed1bd2076e75, 'name': SearchDatastore_Task, 'duration_secs': 0.008643} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.751181] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d70c5101-1b40-4f58-abbb-845e955c669d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.757131] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1179.757131] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522975bc-2350-8b7f-d9da-c621c24c8ae9" [ 1179.757131] env[68569]: _type = "Task" [ 1179.757131] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.764563] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522975bc-2350-8b7f-d9da-c621c24c8ae9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.145810] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.172362] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='fd8c54a7d11e1f3ef44ce32c9618c4f8',container_format='bare',created_at=2025-03-26T04:59:54Z,direct_url=,disk_format='vmdk',id=f6c1f108-99b5-4bc9-80cd-889b31900043,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1303576264-shelved',owner='6f5444e64380448bac041e3c4fd57865',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-03-26T05:00:08Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1180.172607] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1180.172761] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image limits 0:0:0 
{{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1180.172938] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1180.173092] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1180.173237] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1180.173433] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1180.173587] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1180.173748] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1180.173907] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1180.174089] env[68569]: DEBUG nova.virt.hardware [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1180.174930] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd81e5a1-300c-4280-a87e-09f239a58a95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.183119] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37a1da6-55ea-452d-8d23-55715e5733dc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.197087] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 
tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:80:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b05f57b-2ff2-49af-8333-0047f5230208', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.204467] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1180.204700] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1180.204906] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca038107-525d-422d-9580-310252cd4662 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.223307] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.223307] env[68569]: value = "task-3167835" [ 1180.223307] env[68569]: _type = "Task" [ 1180.223307] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.230945] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167835, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.267614] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]522975bc-2350-8b7f-d9da-c621c24c8ae9, 'name': SearchDatastore_Task, 'duration_secs': 0.008683} completed successfully. 
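The CPU-topology records a little further up (flavor/image limits and preferences of 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", one possible topology) boil down to enumerating the sockets x cores x threads factorisations of the vCPU count under the per-dimension limits. A deliberately simplified re-implementation for illustration only, not nova.virt.hardware itself:

    # Simplified illustration (assumption: not Nova's actual code) of how a
    # "possible topologies" list such as [VirtCPUTopology(cores=1,sockets=1,threads=1)]
    # can be enumerated for a vCPU count under per-dimension limits.
    import collections

    VirtCPUTopology = collections.namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topos.append(VirtCPUTopology(sockets, cores, threads))
        return topos

    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]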
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.267906] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.268174] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6317f756-c9ed-4858-bb2a-c20d9f82f90d/6317f756-c9ed-4858-bb2a-c20d9f82f90d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.268419] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d68d72ca-6519-48b9-aa84-4b93996e04b0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.274100] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1180.274100] env[68569]: value = "task-3167836" [ 1180.274100] env[68569]: _type = "Task" [ 1180.274100] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.281261] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167836, 'name': CopyVirtualDisk_Task} progress is 0%. 
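The "Copying Virtual Disk ..." record above, the "Invoking VirtualDiskManager.CopyVirtualDisk_Task" call, and the following "_poll_task ... progress is N%" lines are the standard oslo.vmware call-then-wait pattern. A hedged sketch of that pattern, assuming a reachable vCenter; the host, credentials and datastore paths are placeholders and this is not Nova's vm_util.copy_virtual_disk verbatim:

    # Hedged sketch of the oslo.vmware call-then-wait pattern; host, credentials
    # and datastore paths are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/IMAGE/IMAGE.vmdk',
        destName='[datastore1] INSTANCE/INSTANCE.vmdk')
    # wait_for_task() is what emits the "_poll_task ... progress is N%" records.
    session.wait_for_task(task)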
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.475319] env[68569]: DEBUG nova.compute.manager [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1180.475549] env[68569]: DEBUG oslo_concurrency.lockutils [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.475773] env[68569]: DEBUG oslo_concurrency.lockutils [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.475993] env[68569]: DEBUG oslo_concurrency.lockutils [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.476208] env[68569]: DEBUG nova.compute.manager [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] No waiting events found dispatching network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1180.476339] env[68569]: WARNING nova.compute.manager [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received unexpected event network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208 for instance with vm_state shelved_offloaded and task_state spawning. [ 1180.476515] env[68569]: DEBUG nova.compute.manager [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1180.476683] env[68569]: DEBUG nova.compute.manager [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing instance network info cache due to event network-changed-8b05f57b-2ff2-49af-8333-0047f5230208. 
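The "Received event network-vif-plugged-...", the "...-events" lock, and "No waiting events found dispatching ..." records above reflect a prepare/wait/pop pattern: the compute manager registers the events it expects, and an incoming Neutron notification either completes a waiter or is logged as unexpected. A deliberately simplified, self-contained illustration of that idea, not Nova's InstanceEvents class:

    # Deliberately simplified expect/dispatch event registry; Nova's real
    # implementation (nova.compute.manager.InstanceEvents) is richer.
    import threading

    class SimpleInstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print('No waiting events found dispatching', event_name)
            else:
                waiter.set()

    events = SimpleInstanceEvents()
    w = events.prepare('6dbe8a18-c317-4b36-bd6f-922ce9f85b6a',
                       'network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208')
    events.dispatch('6dbe8a18-c317-4b36-bd6f-922ce9f85b6a',
                    'network-vif-plugged-8b05f57b-2ff2-49af-8333-0047f5230208')
    w.wait(timeout=1)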
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1180.476865] env[68569]: DEBUG oslo_concurrency.lockutils [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] Acquiring lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.476999] env[68569]: DEBUG oslo_concurrency.lockutils [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] Acquired lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.477175] env[68569]: DEBUG nova.network.neutron [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Refreshing network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.732799] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167835, 'name': CreateVM_Task, 'duration_secs': 0.313428} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.733159] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1180.733633] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.733805] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.734202] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1180.734446] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d08188f1-60a9-4474-8b50-85f3f65e1040 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.738941] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1180.738941] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52daf3b4-fffc-1edc-58d8-fa8fce483c07" [ 1180.738941] env[68569]: _type = "Task" [ 1180.738941] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.746188] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52daf3b4-fffc-1edc-58d8-fa8fce483c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.781698] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437599} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.781939] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 6317f756-c9ed-4858-bb2a-c20d9f82f90d/6317f756-c9ed-4858-bb2a-c20d9f82f90d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.782159] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.782387] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93c4c1ea-6989-4658-86e2-2a94417faa25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.788322] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1180.788322] env[68569]: value = "task-3167837" [ 1180.788322] env[68569]: _type = "Task" [ 1180.788322] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.795140] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.176212] env[68569]: DEBUG nova.network.neutron [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updated VIF entry in instance network info cache for port 8b05f57b-2ff2-49af-8333-0047f5230208. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1181.176614] env[68569]: DEBUG nova.network.neutron [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [{"id": "8b05f57b-2ff2-49af-8333-0047f5230208", "address": "fa:16:3e:54:80:53", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b05f57b-2f", "ovs_interfaceid": "8b05f57b-2ff2-49af-8333-0047f5230208", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.249100] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.249341] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Processing image f6c1f108-99b5-4bc9-80cd-889b31900043 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.249571] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.249717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.249920] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 
tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1181.250163] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5c48660-7891-4354-8ab3-afd4d40ea643 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.258922] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1181.259108] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1181.259765] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ba4328a-1e78-4c5e-a94d-e915564932f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.264457] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1181.264457] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b01395-a193-6950-e99d-cf742a531e54" [ 1181.264457] env[68569]: _type = "Task" [ 1181.264457] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.271940] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b01395-a193-6950-e99d-cf742a531e54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.296872] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055087} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.297141] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1181.297877] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3ece40-8ce9-4c0d-9dbe-5813b4592431 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.319161] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 6317f756-c9ed-4858-bb2a-c20d9f82f90d/6317f756-c9ed-4858-bb2a-c20d9f82f90d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.319712] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68c0f9c4-5b84-405d-aae8-0729f07cf5bc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.339353] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1181.339353] env[68569]: value = "task-3167838" [ 1181.339353] env[68569]: _type = "Task" [ 1181.339353] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.352800] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167838, 'name': ReconfigVM_Task} progress is 6%. 
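"Reconfiguring VM instance ... to attach disk ... with type sparse" followed by ReconfigVM_Task above amounts to building a VirtualMachineConfigSpec with a single deviceChange entry and waiting for the reconfigure task. A hedged sketch using the suds 'ns0:' factory convention oslo.vmware exposes; the spec fields are simplified and the controller key and unit number are placeholders, so this is an outline of the shape of the call rather than Nova's volumeops code:

    # Hedged sketch: attach an existing VMDK to a VM via ReconfigVM_Task.
    # controller_key and unit_number are placeholders.
    def attach_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number):
        cf = session.vim.client.factory
        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path
        backing.diskMode = 'persistent'

        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.key = -100

        change = cf.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'add'
        change.device = disk

        spec = cf.create('ns0:VirtualMachineConfigSpec')
        spec.deviceChange = [change]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
        session.wait_for_task(task)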
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.680350] env[68569]: DEBUG oslo_concurrency.lockutils [req-73fdc6a1-5d34-42d3-932e-a0be086859ef req-6833c076-4d7a-47a5-bcd2-52a5af1f6e4c service nova] Releasing lock "refresh_cache-6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.774631] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1181.774900] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Fetch image to [datastore1] OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd/OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1181.775081] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Downloading stream optimized image f6c1f108-99b5-4bc9-80cd-889b31900043 to [datastore1] OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd/OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd.vmdk on the data store datastore1 as vApp {{(pid=68569) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1181.775255] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Downloading image file data f6c1f108-99b5-4bc9-80cd-889b31900043 to the ESX as VM named 'OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd' {{(pid=68569) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1181.844941] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1181.844941] env[68569]: value = "resgroup-9" [ 1181.844941] env[68569]: _type = "ResourcePool" [ 1181.844941] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1181.845324] env[68569]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8dde95a2-f160-413d-9099-454a3a749f2f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.865202] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167838, 'name': ReconfigVM_Task, 'duration_secs': 0.278227} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.866433] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 6317f756-c9ed-4858-bb2a-c20d9f82f90d/6317f756-c9ed-4858-bb2a-c20d9f82f90d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.867118] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease: (returnval){ [ 1181.867118] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b935ec-6e2e-3409-31e7-a88ded7e9bfc" [ 1181.867118] env[68569]: _type = "HttpNfcLease" [ 1181.867118] env[68569]: } obtained for vApp import into resource pool (val){ [ 1181.867118] env[68569]: value = "resgroup-9" [ 1181.867118] env[68569]: _type = "ResourcePool" [ 1181.867118] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1181.867545] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the lease: (returnval){ [ 1181.867545] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b935ec-6e2e-3409-31e7-a88ded7e9bfc" [ 1181.867545] env[68569]: _type = "HttpNfcLease" [ 1181.867545] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1181.867545] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be2cbe52-9420-4c28-a882-23ceba518922 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.876586] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1181.876586] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b935ec-6e2e-3409-31e7-a88ded7e9bfc" [ 1181.876586] env[68569]: _type = "HttpNfcLease" [ 1181.876586] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1181.877547] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1181.877547] env[68569]: value = "task-3167840" [ 1181.877547] env[68569]: _type = "Task" [ 1181.877547] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.885399] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167840, 'name': Rename_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.376963] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1182.376963] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b935ec-6e2e-3409-31e7-a88ded7e9bfc" [ 1182.376963] env[68569]: _type = "HttpNfcLease" [ 1182.376963] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1182.377267] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1182.377267] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b935ec-6e2e-3409-31e7-a88ded7e9bfc" [ 1182.377267] env[68569]: _type = "HttpNfcLease" [ 1182.377267] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1182.377982] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38c1622-579e-466c-83d2-7f63b7c38439 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.387473] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528d0a63-ec0c-4188-3d1f-c3668eb45b59/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1182.387473] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528d0a63-ec0c-4188-3d1f-c3668eb45b59/disk-0.vmdk. {{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1182.391515] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167840, 'name': Rename_Task, 'duration_secs': 0.168036} completed successfully. 
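The rw_handles records above trace the stream-optimized image import: create an HttpNfcLease for vApp import into the resource pool, wait until it is ready, read the disk upload URL from the lease info, write the VMDK over HTTPS, then complete the lease. A rough outline of that flow, not oslo.vmware's rw_handles itself: import_spec, rp_ref, folder_ref and vmdk_data are placeholders, and the single requests.put stands in for the chunked write handle with its periodic HttpNfcLeaseProgress updates:

    # Rough outline of the HttpNfcLease import flow traced above; import_spec,
    # rp_ref, folder_ref and vmdk_data are placeholders.
    import requests
    from oslo_vmware import vim_util

    def upload_stream_optimized(session, rp_ref, folder_ref, import_spec, vmdk_data):
        # "Creating HttpNfcLease lease for vApp import into resource pool"
        lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                                   spec=import_spec, folder=folder_ref)
        # "Lease ... is ready" -- polls the lease state for us.
        session.wait_for_lease_ready(lease)
        # "Found VMDK URL ... from lease info"
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        url = info.deviceUrl[0].url
        # The real code streams chunks and reports progress; one PUT is enough
        # to show the shape of the transfer.
        requests.put(url, data=vmdk_data, verify=False)
        # "Releasing lease" / HttpNfcLeaseComplete
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)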
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.393049] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.446904] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc493dba-fb42-444e-8d58-00ab6db47e73 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.455491] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1bb1ab70-88fe-489a-a745-6b5574964403 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.457427] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1182.457427] env[68569]: value = "task-3167841" [ 1182.457427] env[68569]: _type = "Task" [ 1182.457427] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.468604] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167841, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.698113] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.698462] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.969276] env[68569]: DEBUG oslo_vmware.api [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167841, 'name': PowerOnVM_Task, 'duration_secs': 0.468163} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.971284] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1182.971506] env[68569]: INFO nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Took 6.72 seconds to spawn the instance on the hypervisor. [ 1182.971685] env[68569]: DEBUG nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1182.972523] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6230e444-196e-4701-babb-74e8c0f31841 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.200682] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1183.473353] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Completed reading data from the image iterator. {{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1183.473674] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528d0a63-ec0c-4188-3d1f-c3668eb45b59/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1183.474959] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bef9eb8-c540-4da6-b32c-f274bc53476e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.489097] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528d0a63-ec0c-4188-3d1f-c3668eb45b59/disk-0.vmdk is in state: ready. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1183.489341] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528d0a63-ec0c-4188-3d1f-c3668eb45b59/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1183.489679] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-669c14f7-1e75-4e6d-b882-c1b9f91a26f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.495314] env[68569]: INFO nova.compute.manager [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Took 11.46 seconds to build instance. [ 1183.681673] env[68569]: DEBUG oslo_vmware.rw_handles [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528d0a63-ec0c-4188-3d1f-c3668eb45b59/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1183.682150] env[68569]: INFO nova.virt.vmwareapi.images [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Downloaded image file data f6c1f108-99b5-4bc9-80cd-889b31900043 [ 1183.683452] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0267b488-ab57-45fb-ae0b-b87552c12895 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.701304] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d254ceea-e38e-47e2-a002-9fb81132627a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.723860] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.724170] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.725775] env[68569]: INFO nova.compute.claims [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1183.729775] env[68569]: INFO nova.virt.vmwareapi.images [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] The imported VM was unregistered [ 1183.733023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1183.733023] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.733023] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebfee7b9-d2fd-4ed3-a255-2c6169b13780 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.743152] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created directory with path [datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.743361] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd/OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd.vmdk to [datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk. {{(pid=68569) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1183.743619] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-69d49eb7-9f57-41f5-9021-f5165f4c91c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.750542] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1183.750542] env[68569]: value = "task-3167843" [ 1183.750542] env[68569]: _type = "Task" [ 1183.750542] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.760021] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167843, 'name': MoveVirtualDisk_Task} progress is 0%. 
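After the upload, the imported VM is unregistered and its disk is moved into the per-image cache folder, i.e. from [datastore1] OSTACK_IMG_.../OSTACK_IMG_....vmdk to [datastore1] devstack-image-cache_base/<image_id>/<image_id>.vmdk, as the MoveVirtualDisk_Task above shows. The "[datastore] folder/file" convention used throughout these records can be composed with a trivial helper; the snippet below is only an illustrative stand-in (oslo.vmware ships a DatastorePath object for this):

    # Illustrative stand-in for composing "[datastore] folder/file" paths like
    # the ones in these records.
    import posixpath

    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, posixpath.join(*parts))

    image_id = 'f6c1f108-99b5-4bc9-80cd-889b31900043'
    src = ds_path('datastore1',
                  'OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd',
                  'OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd.vmdk')
    dst = ds_path('datastore1', 'devstack-image-cache_base',
                  image_id, '%s.vmdk' % image_id)
    print(src)
    print(dst)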
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.997828] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e36bb93-cd18-4718-9f72-761e97d8297d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.974s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.131192] env[68569]: DEBUG nova.compute.manager [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1184.131261] env[68569]: DEBUG nova.compute.manager [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing instance network info cache due to event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1184.132091] env[68569]: DEBUG oslo_concurrency.lockutils [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.132091] env[68569]: DEBUG oslo_concurrency.lockutils [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.132091] env[68569]: DEBUG nova.network.neutron [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.260525] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167843, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.700883] env[68569]: DEBUG nova.compute.manager [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1184.702337] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b548fd-3df4-42c8-a877-af40700fe8ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.763629] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167843, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.901953] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3211adcd-1835-4c31-bcfa-8ade973206a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.909769] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18572d93-a919-4902-96c3-b605b432f365 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.915567] env[68569]: DEBUG nova.network.neutron [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updated VIF entry in instance network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.915907] env[68569]: DEBUG nova.network.neutron [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.946466] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f4621b-d2af-48e7-bf61-d8448fe13831 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.954977] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c07680-fbf6-45f9-b018-28a641982b7f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.970533] env[68569]: DEBUG nova.compute.provider_tree [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.067729] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.068251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
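The Acquiring lock / acquired ... waited Ns / "released" ... held Ns lines above (and throughout this trace) are emitted by oslo.concurrency's lockutils wrappers around Nova's critical sections. Below is a minimal sketch of the two forms visible in the log; the guarded functions are hypothetical placeholders, and the lock names are only borrowed from nearby entries for illustration.

from oslo_concurrency import lockutils

# Decorator form: emits the 'Acquiring lock "X" by "Y"' /
# 'Lock "X" acquired by "Y" :: waited N.NNNs' /
# 'Lock "X" "released" by "Y" :: held N.NNNs' DEBUG lines.
@lockutils.synchronized("693d59a2-f8f5-4f63-af55-192b0c458ddf")
def locked_build_and_run_instance():
    pass  # hypothetical guarded work

# Context-manager form: emits the Acquiring/Acquired/Releasing lock lines
# seen around the refresh_cache-<uuid> locks.
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-" + instance_uuid):
        pass  # hypothetical network info cache refresh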
[ 1185.217973] env[68569]: INFO nova.compute.manager [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] instance snapshotting [ 1185.219263] env[68569]: DEBUG nova.objects.instance [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'flavor' on Instance uuid fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.261404] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167843, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.418909] env[68569]: DEBUG oslo_concurrency.lockutils [req-78230d01-2ecd-48a5-9279-2803afb1c888 req-7c2e4690-4e2c-48f2-bae3-5fd3387657dc service nova] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.473757] env[68569]: DEBUG nova.scheduler.client.report [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.570322] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1185.724844] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc615239-015a-4603-b13c-cf2e522b3177 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.746903] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bb2a43-c56c-4ea5-8196-146d39908ac8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.764446] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167843, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.979952] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.980399] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1186.093696] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.094119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.095653] env[68569]: INFO nova.compute.claims [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1186.260423] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1186.264352] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-48cdbdee-2e67-4fd3-9120-9a2ec6beb405 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.266272] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167843, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.346336} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.266429] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd/OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd.vmdk to [datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk. [ 1186.266608] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Cleaning up location [datastore1] OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1186.266767] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_8bcf7d46-0abf-4563-b9e9-3ab8fd6879cd {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.267329] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab8c4bba-c915-4a02-87bb-ab2244a4ed9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.271573] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1186.271573] env[68569]: value = "task-3167844" [ 1186.271573] env[68569]: _type = "Task" [ 1186.271573] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.275590] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1186.275590] env[68569]: value = "task-3167845" [ 1186.275590] env[68569]: _type = "Task" [ 1186.275590] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.281358] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167844, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.285926] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167845, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.485825] env[68569]: DEBUG nova.compute.utils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1186.487337] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1186.487518] env[68569]: DEBUG nova.network.neutron [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1186.523492] env[68569]: DEBUG nova.policy [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54ebbdfe9bfb4854a40b07d60c7a9efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f361997374e943cfa7a8e4e4884d6c65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1186.789152] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167844, 'name': CreateSnapshot_Task, 'duration_secs': 0.512196} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.791738] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1186.792038] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034017} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.792757] env[68569]: DEBUG nova.network.neutron [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Successfully created port: 518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1186.796031] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04f2b12-cf5c-4922-a98c-0cacc56a37eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.797518] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.797699] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.797971] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk to [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1186.798230] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a6e6ecf-6daf-41d1-a392-4714123d6f39 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.808726] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1186.808726] env[68569]: value = "task-3167846" [ 1186.808726] env[68569]: _type = "Task" [ 1186.808726] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.817747] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.991325] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1187.269382] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9b2e2c-c72d-4d40-910e-e898bd685f16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.279761] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a885b67f-1f67-4816-a10a-4bd2d7de152d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.335502] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1187.335865] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f6bd5b0c-528f-4a02-92a0-4f29a7af0c7b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.345544] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d3b03d-aacc-4767-be5e-5a4e967f1699 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.353148] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167846, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.356420] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1187.356420] env[68569]: value = "task-3167847" [ 1187.356420] env[68569]: _type = "Task" [ 1187.356420] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.357630] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c41ebd8-413d-43ac-a3b4-22892717114b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.374502] env[68569]: DEBUG nova.compute.provider_tree [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.379164] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1187.380505] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167847, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.496614] env[68569]: INFO nova.virt.block_device [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Booting with volume 5f7eaca7-05fb-4187-bfc2-ecf09e846667 at /dev/sda [ 1187.538047] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3490e757-c9dd-4e51-80a2-e641f42af88f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.549838] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed5ea72-f219-4c8d-8eff-ce6a4052bdb2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.585819] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-939be48a-7aa2-411d-8e28-4027e0beb609 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.597017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d724d4f9-8b16-46de-9aaa-12f60d043c9a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.631884] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3b5d33-a1b2-42d2-8fb6-9748f66188b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.640802] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef47c08b-93d7-4169-aa5a-6289f7f615c4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.656999] env[68569]: DEBUG nova.virt.block_device [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] 
[instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating existing volume attachment record: 9c4b3829-2286-4db5-ae8e-655ad98d25c6 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1187.846525] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167846, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.873801] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167847, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.881956] env[68569]: DEBUG nova.scheduler.client.report [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.887368] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Getting list of instances from cluster (obj){ [ 1187.887368] env[68569]: value = "domain-c8" [ 1187.887368] env[68569]: _type = "ClusterComputeResource" [ 1187.887368] env[68569]: } {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1187.888679] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681c2def-a87e-463b-a7b6-791e0b806d73 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.911712] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Got total of 9 instances {{(pid=68569) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1187.911895] env[68569]: WARNING nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] While synchronizing instance power states, found 11 instances in the database and 9 instances on the hypervisor. 
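The warning just above ("found 11 instances in the database and 9 instances on the hypervisor") comes from the _sync_power_states periodic task, which lists the cluster's VMs and compares them against the database before triggering a per-UUID sync. A rough sketch of that shape using oslo.service's periodic_task machinery follows; the two callables and the 600-second spacing are assumptions for illustration, not values taken from this deployment.

from oslo_log import log as logging
from oslo_service import periodic_task

LOG = logging.getLogger(__name__)

class PowerStateSyncer(periodic_task.PeriodicTasks):
    # list_db_uuids / list_vm_uuids are hypothetical callables standing in for
    # the database query and the vCenter cluster listing seen in this trace.
    def __init__(self, conf, list_db_uuids, list_vm_uuids):
        super().__init__(conf)
        self._list_db_uuids = list_db_uuids
        self._list_vm_uuids = list_vm_uuids

    @periodic_task.periodic_task(spacing=600)
    def _sync_power_states(self, context):
        db_uuids = self._list_db_uuids(context)
        vm_uuids = self._list_vm_uuids()
        if len(db_uuids) != len(vm_uuids):
            LOG.warning("While synchronizing instance power states, found %d "
                        "instances in the database and %d instances on the "
                        "hypervisor.", len(db_uuids), len(vm_uuids))
        for uuid in db_uuids:
            LOG.debug("Triggering sync for uuid %s", uuid)

The task methods are driven by calling run_periodic_tasks() on the manager from a looping call, which is what produces the "Running periodic task ..." lines.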
[ 1187.912056] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 61aa0997-ffa6-4551-bdaa-132026e240f9 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.912219] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid db75de86-9dda-42b2-9e7a-55e2ba5adad1 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.912372] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.912520] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.912661] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.912806] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 09f4018b-f1cd-4726-b871-b110a7cf1b43 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.912947] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 3363dac8-c5df-405e-8bdc-9002e2d45e05 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.913139] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.913311] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 6317f756-c9ed-4858-bb2a-c20d9f82f90d {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.913453] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 7289fe12-4f1f-488f-9be2-a7cb666727b3 {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.913592] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Triggering sync for uuid 693d59a2-f8f5-4f63-af55-192b0c458ddf {{(pid=68569) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10951}} [ 1187.913968] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "61aa0997-ffa6-4551-bdaa-132026e240f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.914207] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.914481] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.914660] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.914899] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.915119] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.915292] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.915452] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] During sync_power_state the instance has a pending task (image_uploading). Skip. 
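The recurring "Waiting for the task ... / progress is N% / completed successfully" entries throughout this trace (MoveVirtualDisk_Task, CopyVirtualDisk_Task, CreateSnapshot_Task, CloneVM_Task) are oslo.vmware's task polling around long-running vCenter operations. A minimal sketch of that pattern, with a placeholder vCenter host, credentials, and datastore paths (assumptions, not values from this log):

from oslo_vmware import api

# task_poll_interval controls how often the task is re-read, which is what
# produces the periodic "progress is N%" DEBUG lines.
session = api.VMwareAPISession(
    "vc.example.test", "svc-user", "secret",
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, "MoveVirtualDisk_Task", disk_mgr,
    sourceName="[datastore1] OSTACK_IMG_example/OSTACK_IMG_example.vmdk",
    destName="[datastore1] devstack-image-cache_base/example/example.vmdk",
    force=False)
session.wait_for_task(task)  # blocks, polling until the task succeeds or raises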
[ 1187.915609] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.915807] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.916008] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.916252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.916425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.916643] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.916810] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.917037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.917212] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.917426] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.917595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.917847] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.918043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.918942] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e685f447-b8bf-40b4-be80-2a08afe5166f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.922376] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9247b89-e9db-46f1-8fa8-89bb9d14595e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.925768] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bd1515-81a7-46f3-86c0-96f0670e3629 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.928525] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7eab59-9e81-4089-8a70-eacfe498c5b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.931387] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1211a31-ab71-408c-86c0-48a8b4ce0689 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.934648] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688b1e5c-4c35-424c-9a0c-fd3baf582a8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.938109] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62b75e6-a8fa-4114-ae04-f0d7b4dcab1f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.131441] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.249305] env[68569]: DEBUG nova.compute.manager [req-51f0c825-d889-488c-853a-56a7b02d8a3d req-019fa1c5-70bc-4954-a1d9-6096e178c950 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Received event network-vif-plugged-518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1188.249533] env[68569]: DEBUG oslo_concurrency.lockutils [req-51f0c825-d889-488c-853a-56a7b02d8a3d req-019fa1c5-70bc-4954-a1d9-6096e178c950 service nova] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.249738] env[68569]: DEBUG oslo_concurrency.lockutils [req-51f0c825-d889-488c-853a-56a7b02d8a3d req-019fa1c5-70bc-4954-a1d9-6096e178c950 service nova] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.249941] env[68569]: DEBUG oslo_concurrency.lockutils [req-51f0c825-d889-488c-853a-56a7b02d8a3d req-019fa1c5-70bc-4954-a1d9-6096e178c950 service nova] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.250697] env[68569]: DEBUG nova.compute.manager [req-51f0c825-d889-488c-853a-56a7b02d8a3d req-019fa1c5-70bc-4954-a1d9-6096e178c950 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] No waiting events found dispatching network-vif-plugged-518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1188.250887] env[68569]: WARNING nova.compute.manager [req-51f0c825-d889-488c-853a-56a7b02d8a3d req-019fa1c5-70bc-4954-a1d9-6096e178c950 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Received unexpected event network-vif-plugged-518ea6b8-1dba-4b9b-aa1e-73186cca17de for instance with vm_state building and task_state block_device_mapping. [ 1188.350918] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167846, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.353040] env[68569]: DEBUG nova.network.neutron [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Successfully updated port: 518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1188.377805] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167847, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.389438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.389962] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1188.453619] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.539s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.461310] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.545s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.463175] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.466673] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.550s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.468139] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.468438] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.471885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.555s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.854988] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167846, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.855822] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.856059] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.856319] env[68569]: DEBUG nova.network.neutron [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.878083] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167847, 'name': CloneVM_Task, 'duration_secs': 1.39968} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.878402] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created linked-clone VM from snapshot [ 1188.879304] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf5faad-5cbd-4372-b3fd-34b774bfdff7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.892459] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Uploading image 5f41e5c3-e84e-4f6c-bb68-aa7db4c585c6 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1188.896202] env[68569]: DEBUG nova.compute.utils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1188.897444] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1188.897614] env[68569]: DEBUG nova.network.neutron [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1188.916202] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1188.916202] env[68569]: value = "vm-633733" [ 1188.916202] env[68569]: _type = "VirtualMachine" [ 1188.916202] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1188.916782] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f8fb3433-a395-45ff-b15f-d677ed485aa7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.929929] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease: (returnval){ [ 1188.929929] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7012-1e11-cfad-a5bf-0d6687fced0f" [ 1188.929929] env[68569]: _type = "HttpNfcLease" [ 1188.929929] env[68569]: } obtained for exporting VM: (result){ [ 1188.929929] env[68569]: value = "vm-633733" [ 1188.929929] env[68569]: _type = "VirtualMachine" [ 1188.929929] env[68569]: }. 
{{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1188.930503] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the lease: (returnval){ [ 1188.930503] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7012-1e11-cfad-a5bf-0d6687fced0f" [ 1188.930503] env[68569]: _type = "HttpNfcLease" [ 1188.930503] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1188.943406] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1188.943406] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7012-1e11-cfad-a5bf-0d6687fced0f" [ 1188.943406] env[68569]: _type = "HttpNfcLease" [ 1188.943406] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1188.944992] env[68569]: DEBUG nova.policy [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1189.259137] env[68569]: DEBUG nova.network.neutron [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Successfully created port: 5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1189.347482] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167846, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.426841} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.347665] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f6c1f108-99b5-4bc9-80cd-889b31900043/f6c1f108-99b5-4bc9-80cd-889b31900043.vmdk to [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.348504] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a44ed7-ffff-4809-8df9-e44de44c2ae5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.372302] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.372673] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a52784c7-14ec-4d91-a29a-56fc0fb9643b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.394195] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1189.394195] env[68569]: value = "task-3167849" [ 1189.394195] env[68569]: _type = "Task" [ 1189.394195] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.400839] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1189.407877] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167849, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.430393] env[68569]: DEBUG nova.network.neutron [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1189.441201] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1189.441201] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7012-1e11-cfad-a5bf-0d6687fced0f" [ 1189.441201] env[68569]: _type = "HttpNfcLease" [ 1189.441201] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1189.441568] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1189.441568] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523b7012-1e11-cfad-a5bf-0d6687fced0f" [ 1189.441568] env[68569]: _type = "HttpNfcLease" [ 1189.441568] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1189.442301] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc714217-2bf2-402a-a42c-a60e9fcf8084 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.450518] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52082b22-a525-13e4-3d02-6d5c55ea2f36/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1189.450721] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52082b22-a525-13e4-3d02-6d5c55ea2f36/disk-0.vmdk for reading. 
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1189.551256] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-36608efb-af0e-454a-bf89-357c6ff57809 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.597115] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.597364] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.629733] env[68569]: DEBUG nova.network.neutron [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [{"id": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "address": "fa:16:3e:85:26:c6", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap518ea6b8-1d", "ovs_interfaceid": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.760027] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1189.760027] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1189.760293] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.760293] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1189.760459] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.760711] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1189.760885] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1189.761127] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1189.761288] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1189.761453] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Got 1 possible topologies {{(pid=68569) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1189.761609] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1189.761808] env[68569]: DEBUG nova.virt.hardware [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1189.762800] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8549d0-d819-43b3-b17b-4416eaab965e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.773043] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cdcf52-b5fb-4e6f-b8de-cc4d09acda21 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.908396] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167849, 'name': ReconfigVM_Task, 'duration_secs': 0.307791} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.909304] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a/6dbe8a18-c317-4b36-bd6f-922ce9f85b6a.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.910482] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'encryption_options': None, 'device_type': 'disk', 'disk_bus': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'encryption_format': None, 'guest_format': None, 'device_name': '/dev/sda', 'encrypted': False, 'image_id': 'cfcf6154-fe87-45d3-9aaf-2d3604c95629'}], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633727', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'name': 'volume-60cd4f2b-f681-462c-959e-39f6ce745f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6dbe8a18-c317-4b36-bd6f-922ce9f85b6a', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'serial': '60cd4f2b-f681-462c-959e-39f6ce745f96'}, 'delete_on_termination': False, 'disk_bus': None, 
'boot_index': None, 'mount_device': '/dev/sdb', 'guest_format': None, 'attachment_id': '9c22e876-7323-4a4e-8eed-0e7c77009203', 'volume_type': None}], 'swap': None} {{(pid=68569) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1189.910769] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1189.911031] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633727', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'name': 'volume-60cd4f2b-f681-462c-959e-39f6ce745f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6dbe8a18-c317-4b36-bd6f-922ce9f85b6a', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'serial': '60cd4f2b-f681-462c-959e-39f6ce745f96'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1189.911921] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbfa1f3-eab5-4674-9b5a-ace31036c1af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.929720] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c386c1e0-bcb1-41dd-bbe0-cc8a9722d963 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.956526] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-60cd4f2b-f681-462c-959e-39f6ce745f96/volume-60cd4f2b-f681-462c-959e-39f6ce745f96.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.957843] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caa78eba-e305-4030-820d-6c224b66217d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.979252] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1189.979252] env[68569]: value = "task-3167850" [ 1189.979252] env[68569]: _type = "Task" [ 1189.979252] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.988685] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167850, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.132107] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.132500] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance network_info: |[{"id": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "address": "fa:16:3e:85:26:c6", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap518ea6b8-1d", "ovs_interfaceid": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1190.133155] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:26:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '518ea6b8-1dba-4b9b-aa1e-73186cca17de', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1190.141378] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.141856] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1190.143087] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97b19daa-aaa2-44d1-a0f2-dd8ed27c8817 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.167398] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1190.167398] env[68569]: value = "task-3167851" [ 1190.167398] env[68569]: _type = "Task" [ 1190.167398] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.177420] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167851, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.281055] env[68569]: DEBUG nova.compute.manager [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Received event network-changed-518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1190.281736] env[68569]: DEBUG nova.compute.manager [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Refreshing instance network info cache due to event network-changed-518ea6b8-1dba-4b9b-aa1e-73186cca17de. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1190.282027] env[68569]: DEBUG oslo_concurrency.lockutils [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] Acquiring lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.282238] env[68569]: DEBUG oslo_concurrency.lockutils [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] Acquired lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.282507] env[68569]: DEBUG nova.network.neutron [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Refreshing network info cache for port 518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1190.410897] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1190.438197] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1190.438916] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1190.439044] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1190.439214] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1190.439385] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1190.439545] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1190.439792] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1190.440027] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1190.440228] env[68569]: DEBUG 
nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1190.440394] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1190.440640] env[68569]: DEBUG nova.virt.hardware [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1190.441555] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1dc0f1-036c-423e-95b2-d406efa85b98 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.450578] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef0888-d528-4932-912a-ef9b5014b311 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.490591] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167850, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.597345] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.597596] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.679553] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167851, 'name': CreateVM_Task, 'duration_secs': 0.363091} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.679784] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1190.688405] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633728', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'name': 'volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7289fe12-4f1f-488f-9be2-a7cb666727b3', 'attached_at': '', 'detached_at': '', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'serial': '5f7eaca7-05fb-4187-bfc2-ecf09e846667'}, 'delete_on_termination': True, 'disk_bus': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'guest_format': None, 'attachment_id': '9c4b3829-2286-4db5-ae8e-655ad98d25c6', 'volume_type': None}], 'swap': None} {{(pid=68569) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1190.688629] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Root volume attach. Driver type: vmdk {{(pid=68569) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1190.689470] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699b1fc8-49ec-4ebb-bafc-dac2e7f208f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.697943] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcf29a2-cb20-4f4c-ae17-88d17e7302a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.704888] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c02691-3f9b-4211-acfc-907c4d1b4d4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.712098] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-dfc04c3e-9a32-41d1-9dcd-d794aa4a92d7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.720774] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1190.720774] env[68569]: value = "task-3167852" [ 1190.720774] env[68569]: _type = "Task" [ 1190.720774] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.729508] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.776509] env[68569]: DEBUG nova.network.neutron [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Successfully updated port: 5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1190.991652] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167850, 'name': ReconfigVM_Task, 'duration_secs': 0.5762} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.992049] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-60cd4f2b-f681-462c-959e-39f6ce745f96/volume-60cd4f2b-f681-462c-959e-39f6ce745f96.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.996995] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e62d984c-f11a-40ea-b8a8-d2c957de54de {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.017260] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1191.017260] env[68569]: value = "task-3167853" [ 1191.017260] env[68569]: _type = "Task" [ 1191.017260] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.035117] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167853, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.061565] env[68569]: DEBUG nova.network.neutron [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updated VIF entry in instance network info cache for port 518ea6b8-1dba-4b9b-aa1e-73186cca17de. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1191.062099] env[68569]: DEBUG nova.network.neutron [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [{"id": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "address": "fa:16:3e:85:26:c6", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap518ea6b8-1d", "ovs_interfaceid": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.235886] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 42%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.278915] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.279103] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.279562] env[68569]: DEBUG nova.network.neutron [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.327905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.328227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.534661] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167853, 'name': ReconfigVM_Task, 'duration_secs': 0.217284} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.535184] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633727', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'name': 'volume-60cd4f2b-f681-462c-959e-39f6ce745f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6dbe8a18-c317-4b36-bd6f-922ce9f85b6a', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'serial': '60cd4f2b-f681-462c-959e-39f6ce745f96'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1191.535935] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d46ce0c1-8f68-45dd-b826-13829af58e19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.548059] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1191.548059] env[68569]: value = "task-3167854" [ 1191.548059] env[68569]: _type = "Task" [ 1191.548059] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.560795] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167854, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.564584] env[68569]: DEBUG oslo_concurrency.lockutils [req-819ee5b4-7e58-414a-83e5-aee232087071 req-0d720d8c-0fac-45ae-b199-680d1d3885ad service nova] Releasing lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.597606] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.598020] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.598256] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1191.738901] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 56%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.815501] env[68569]: DEBUG nova.network.neutron [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1191.831612] env[68569]: DEBUG nova.compute.utils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.992662] env[68569]: DEBUG nova.network.neutron [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.062984] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167854, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.237310] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 71%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.325508] env[68569]: DEBUG nova.compute.manager [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-vif-plugged-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1192.325508] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.325508] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.325508] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.325508] env[68569]: DEBUG nova.compute.manager [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] No waiting events found dispatching network-vif-plugged-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1192.325508] env[68569]: WARNING nova.compute.manager [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received unexpected event network-vif-plugged-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 for instance with vm_state building and task_state spawning. [ 1192.325508] env[68569]: DEBUG nova.compute.manager [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1192.325739] env[68569]: DEBUG nova.compute.manager [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing instance network info cache due to event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1192.326026] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.334285] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.495340] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.495790] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Instance network_info: |[{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1192.496151] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.496350] env[68569]: DEBUG nova.network.neutron [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1192.497725] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None 
req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:d5:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bab5cde-e125-4604-9b7a-f3e491b5e7c8', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1192.507675] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1192.508843] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1192.509139] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f68d47f2-ff50-4b4a-aeb1-baeb94544e59 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.535205] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1192.535205] env[68569]: value = "task-3167855" [ 1192.535205] env[68569]: _type = "Task" [ 1192.535205] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.547487] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167855, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.562435] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167854, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.597344] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.739168] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 86%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.047317] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167855, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.061044] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167854, 'name': Rename_Task, 'duration_secs': 1.241101} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.061463] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1193.062182] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8083ea66-d939-4582-9958-46db21a593c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.072585] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1193.072585] env[68569]: value = "task-3167856" [ 1193.072585] env[68569]: _type = "Task" [ 1193.072585] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.083860] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.102052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.102052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.102052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.102052] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1193.102876] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d35c12-2feb-4e79-b8da-d3da1d6d16d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.112237] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f589946f-340b-467d-9935-ca94877195ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.132364] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1022c4a5-bc1a-4aa4-8010-e285037d9798 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.140545] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7156cb-8f45-4072-9e18-d5754ac2fa2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.173108] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179708MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1193.173274] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.173504] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.237193] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 97%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.297455] env[68569]: DEBUG nova.network.neutron [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updated VIF entry in instance network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1193.297996] env[68569]: DEBUG nova.network.neutron [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.408831] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.409153] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.409408] env[68569]: INFO nova.compute.manager [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Attaching volume 5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e to /dev/sdb [ 
1193.443808] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc74ccd-6a3d-497d-8c67-d60e297d5a54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.453243] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48a3c2c-c931-45a2-b201-0dfac908e83c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.469702] env[68569]: DEBUG nova.virt.block_device [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updating existing volume attachment record: d3ce4893-14e1-49d3-b6ac-d7e11ed106f6 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1193.547278] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167855, 'name': CreateVM_Task, 'duration_secs': 0.624066} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.547563] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1193.548170] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.548339] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.548643] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1193.548911] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ee4fec4-050f-45ed-a298-50acee6a9997 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.553782] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1193.553782] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52822ea5-1897-7e98-9817-69f23f693e41" [ 1193.553782] env[68569]: _type = "Task" [ 1193.553782] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.564737] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52822ea5-1897-7e98-9817-69f23f693e41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.568558] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.568769] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.583707] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167856, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.737563] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task} progress is 98%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.801080] env[68569]: DEBUG oslo_concurrency.lockutils [req-b8515e02-2ca1-45ae-a6b1-ee7204c1ed55 req-2b2959f0-61dd-462e-9017-f493c5dc74af service nova] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.067926] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52822ea5-1897-7e98-9817-69f23f693e41, 'name': SearchDatastore_Task, 'duration_secs': 0.012171} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.068424] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.068736] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1194.069094] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.069320] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.069579] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1194.069916] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cec1fd6-9981-4aa9-aea0-2d02a99c0bfa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.073375] env[68569]: DEBUG nova.compute.utils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1194.088323] env[68569]: DEBUG oslo_vmware.api [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167856, 'name': PowerOnVM_Task, 'duration_secs': 0.652392} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.090362] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1194.092428] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1194.092655] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1194.093821] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9cc209a-3faa-4694-a9b2-902c75ed5b86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.100597] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1194.100597] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5254f319-5ae8-1072-8a04-c376906dd6a4" [ 1194.100597] env[68569]: _type = "Task" [ 1194.100597] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.110397] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5254f319-5ae8-1072-8a04-c376906dd6a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.192122] env[68569]: DEBUG nova.compute.manager [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.193133] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5ad337-c4fb-4002-b5b1-65cb3033e9eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.203767] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 61aa0997-ffa6-4551-bdaa-132026e240f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.203881] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204046] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fdcdd4b5-82bd-43c9-8865-807f86789a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204101] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 5de9a459-a2a2-4d78-9a66-cf819e8893b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204206] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 09f4018b-f1cd-4726-b871-b110a7cf1b43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204319] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 3363dac8-c5df-405e-8bdc-9002e2d45e05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204429] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204536] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204644] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6317f756-c9ed-4858-bb2a-c20d9f82f90d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204752] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 7289fe12-4f1f-488f-9be2-a7cb666727b3 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.204859] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 693d59a2-f8f5-4f63-af55-192b0c458ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.205054] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1194.205190] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1194.238195] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167852, 'name': RelocateVM_Task, 'duration_secs': 3.067243} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.238515] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1194.238720] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633728', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'name': 'volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7289fe12-4f1f-488f-9be2-a7cb666727b3', 'attached_at': '', 'detached_at': '', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'serial': '5f7eaca7-05fb-4187-bfc2-ecf09e846667'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1194.239596] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb1b9e8-40da-4dbb-8f23-311ba924da5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.259793] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050e953b-f913-4790-9aeb-99611311c8ec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.285205] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667/volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1194.287907] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af2e9dd9-d617-416f-b3be-6d2f29b640d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.309489] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1194.309489] env[68569]: value = "task-3167858" [ 1194.309489] env[68569]: _type = "Task" [ 1194.309489] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.322073] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167858, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.401948] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ad86d5-ba39-4311-b341-0340c2e173a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.410943] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8647dd1e-ed4b-46a0-a034-77f5631ed431 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.444809] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a681fab8-e568-4284-bc5a-c722b5833d5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.454382] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99915eb5-cc8d-4e9e-8f83-f2814c946b86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.470062] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.577588] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.612723] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5254f319-5ae8-1072-8a04-c376906dd6a4, 'name': SearchDatastore_Task, 'duration_secs': 0.022315} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.613613] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16eb093b-f3e2-412d-a090-98c47c670e0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.620346] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1194.620346] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f7f10d-57da-ef02-dc66-68a1eb472cf1" [ 1194.620346] env[68569]: _type = "Task" [ 1194.620346] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.631064] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f7f10d-57da-ef02-dc66-68a1eb472cf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.715205] env[68569]: DEBUG oslo_concurrency.lockutils [None req-1b92d110-e9f8-4ad5-be15-a19219a7c175 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.941s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.716132] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.801s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.716389] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] During sync_power_state the instance has a pending task (spawning). Skip. [ 1194.716832] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.821120] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167858, 'name': ReconfigVM_Task, 'duration_secs': 0.382846} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.821416] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667/volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.826061] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a48eb8a3-9ca2-4024-a0ba-741ad4b28de2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.843167] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1194.843167] env[68569]: value = "task-3167859" [ 1194.843167] env[68569]: _type = "Task" [ 1194.843167] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.851681] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167859, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.973497] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.131195] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f7f10d-57da-ef02-dc66-68a1eb472cf1, 'name': SearchDatastore_Task, 'duration_secs': 0.013439} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.131494] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.131803] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 693d59a2-f8f5-4f63-af55-192b0c458ddf/693d59a2-f8f5-4f63-af55-192b0c458ddf.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.132090] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46a1c46b-742b-4c0f-a182-931d56569cd0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.143803] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1195.143803] env[68569]: value = "task-3167860" [ 1195.143803] env[68569]: _type = "Task" [ 1195.143803] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.153023] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.355131] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167859, 'name': ReconfigVM_Task, 'duration_secs': 0.132975} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.355641] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633728', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'name': 'volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7289fe12-4f1f-488f-9be2-a7cb666727b3', 'attached_at': '', 'detached_at': '', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'serial': '5f7eaca7-05fb-4187-bfc2-ecf09e846667'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1195.356515] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdd0f253-dfe6-4994-b410-21eadf795e26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.365260] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1195.365260] env[68569]: value = "task-3167861" [ 1195.365260] env[68569]: _type = "Task" [ 1195.365260] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.376504] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167861, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.478370] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1195.478628] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.305s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.639338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.639782] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.639938] env[68569]: INFO nova.compute.manager [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Attaching volume e754f12a-d06a-4cae-b424-c50627fcbe38 to /dev/sdb [ 1195.659448] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167860, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.678752] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c800b92e-2db6-4bd1-ad74-5cdc6388b4cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.687380] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314792dc-a5da-4f67-b86e-9b1e195b1c24 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.704758] env[68569]: DEBUG nova.virt.block_device [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updating existing volume attachment record: d85be893-ca74-46ee-9a30-535fdbdefbc4 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1195.876047] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167861, 'name': Rename_Task, 'duration_secs': 0.383234} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.876336] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.876579] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-914b61f3-0bca-4d5c-b1da-9ee6cbd39172 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.882337] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1195.882337] env[68569]: value = "task-3167864" [ 1195.882337] env[68569]: _type = "Task" [ 1195.882337] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.890366] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.155640] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544489} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.155967] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 693d59a2-f8f5-4f63-af55-192b0c458ddf/693d59a2-f8f5-4f63-af55-192b0c458ddf.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1196.156221] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1196.156495] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d2c429b-7e9f-4e20-a1fc-5679055c5668 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.165323] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1196.165323] env[68569]: value = "task-3167865" [ 1196.165323] env[68569]: _type = "Task" [ 1196.165323] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.174601] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167865, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.399370] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167864, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.675829] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154227} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.676159] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1196.676885] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40987aef-52a1-4422-8a82-c0fd534bb82c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.702443] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 693d59a2-f8f5-4f63-af55-192b0c458ddf/693d59a2-f8f5-4f63-af55-192b0c458ddf.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1196.702803] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a804e278-9b76-472e-b3a0-02c85ce89806 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.723274] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1196.723274] env[68569]: value = "task-3167866" [ 1196.723274] env[68569]: _type = "Task" [ 1196.723274] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.732379] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167866, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.894515] env[68569]: DEBUG oslo_vmware.api [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167864, 'name': PowerOnVM_Task, 'duration_secs': 0.89004} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.894796] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1196.895085] env[68569]: INFO nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 7.14 seconds to spawn the instance on the hypervisor. 
[ 1196.895275] env[68569]: DEBUG nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1196.896055] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c367f66-d3f2-4b50-b09a-1892b7c1407e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.234457] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.413027] env[68569]: INFO nova.compute.manager [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 13.71 seconds to build instance. [ 1197.734569] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167866, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.916051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0b8b3999-f046-4b3e-ac04-0783c5096918 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.217s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.916051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.998s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.916206] env[68569]: INFO nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] During sync_power_state the instance has a pending task (block_device_mapping). Skip. [ 1197.916348] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.016228] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1198.016482] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633736', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'name': 'volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'serial': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1198.017409] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a9beca-6019-428c-a2ee-007506023b6a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.037018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d342d65-83b7-45bf-9daa-5bbfcfa8a8b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.062916] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e/volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1198.063304] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13fb834d-059d-4328-9bf5-384e2abefd96 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.082696] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1198.082696] env[68569]: value = "task-3167868" [ 1198.082696] env[68569]: _type = "Task" [ 1198.082696] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.093175] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167868, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.236078] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167866, 'name': ReconfigVM_Task, 'duration_secs': 1.017479} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.236436] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 693d59a2-f8f5-4f63-af55-192b0c458ddf/693d59a2-f8f5-4f63-af55-192b0c458ddf.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1198.237208] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4e36697-c07a-4928-89c0-4e7a1e53a6ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.246093] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1198.246093] env[68569]: value = "task-3167869" [ 1198.246093] env[68569]: _type = "Task" [ 1198.246093] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.256026] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167869, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.592694] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.661585] env[68569]: DEBUG nova.compute.manager [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Received event network-changed-5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1198.661786] env[68569]: DEBUG nova.compute.manager [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Refreshing instance network info cache due to event network-changed-5d59acab-5f9d-44bc-ac8c-231dda0ac182. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1198.661992] env[68569]: DEBUG oslo_concurrency.lockutils [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] Acquiring lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.662146] env[68569]: DEBUG oslo_concurrency.lockutils [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] Acquired lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.662303] env[68569]: DEBUG nova.network.neutron [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Refreshing network info cache for port 5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.756918] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167869, 'name': Rename_Task, 'duration_secs': 0.276005} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.757297] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.757462] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea7ca23f-f350-4a45-b164-8e01427a59c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.764688] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1198.764688] env[68569]: value = "task-3167870" [ 1198.764688] env[68569]: _type = "Task" [ 1198.764688] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.772966] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.094294] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167868, 'name': ReconfigVM_Task, 'duration_secs': 0.829202} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.094294] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e/volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.098593] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-041a4464-ab5b-497b-bf8e-99852902723b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.117543] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1199.117543] env[68569]: value = "task-3167871" [ 1199.117543] env[68569]: _type = "Task" [ 1199.117543] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.126731] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167871, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.275468] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167870, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.378280] env[68569]: DEBUG nova.network.neutron [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updated VIF entry in instance network info cache for port 5d59acab-5f9d-44bc-ac8c-231dda0ac182. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.378712] env[68569]: DEBUG nova.network.neutron [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updating instance_info_cache with network_info: [{"id": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "address": "fa:16:3e:b2:78:e5", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d59acab-5f", "ovs_interfaceid": "5d59acab-5f9d-44bc-ac8c-231dda0ac182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.405774] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52082b22-a525-13e4-3d02-6d5c55ea2f36/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1199.406665] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beec9a6e-3b4c-44ca-bbf5-5274b59d0ce6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.413955] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52082b22-a525-13e4-3d02-6d5c55ea2f36/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1199.414136] env[68569]: ERROR oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52082b22-a525-13e4-3d02-6d5c55ea2f36/disk-0.vmdk due to incomplete transfer. 
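[editor's note] The "Updating instance_info_cache with network_info" entry above logs the full VIF structure Nova caches for the port. A minimal, self-contained sketch of walking that structure to pull out the fields an operator usually cares about (port ID, MAC, fixed IPs); the sample values are copied from the cache entry above, and nothing here calls a Nova or Neutron API:

def summarize_vif(vif):
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    return {"port_id": vif["id"],
            "mac": vif["address"],
            "ovs_interface": vif.get("ovs_interfaceid"),
            "fixed_ips": fixed_ips}

sample_vif = {
    "id": "5d59acab-5f9d-44bc-ac8c-231dda0ac182",
    "address": "fa:16:3e:b2:78:e5",
    "ovs_interfaceid": "5d59acab-5f9d-44bc-ac8c-231dda0ac182",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.7"}]}]},
}
print(summarize_vif(sample_vif))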
[ 1199.414356] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4c74b462-96ab-4ab8-b990-6ab6077800c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.421947] env[68569]: DEBUG oslo_vmware.rw_handles [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52082b22-a525-13e4-3d02-6d5c55ea2f36/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1199.422164] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Uploaded image 5f41e5c3-e84e-4f6c-bb68-aa7db4c585c6 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1199.424414] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1199.424668] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-41c92b8f-f531-4fcb-b5be-a64edad4ec2e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.432014] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1199.432014] env[68569]: value = "task-3167872" [ 1199.432014] env[68569]: _type = "Task" [ 1199.432014] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.441591] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167872, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.630300] env[68569]: DEBUG oslo_vmware.api [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167871, 'name': ReconfigVM_Task, 'duration_secs': 0.215525} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.630628] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633736', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'name': 'volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'serial': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1199.775784] env[68569]: DEBUG oslo_vmware.api [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167870, 'name': PowerOnVM_Task, 'duration_secs': 0.862666} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.776160] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1199.776277] env[68569]: INFO nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Took 9.37 seconds to spawn the instance on the hypervisor. [ 1199.776450] env[68569]: DEBUG nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.777229] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72e3d77-5ae6-4730-9a0c-f2335d56feb1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.882021] env[68569]: DEBUG oslo_concurrency.lockutils [req-d8fc55fd-74de-4f47-9d7e-affa3c8e1958 req-1d3002d4-15f1-4463-8a81-00a07d630f5e service nova] Releasing lock "refresh_cache-61aa0997-ffa6-4551-bdaa-132026e240f9" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.943385] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167872, 'name': Destroy_Task} progress is 33%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.071155] env[68569]: DEBUG nova.compute.manager [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1200.253193] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1200.253378] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633737', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'name': 'volume-e754f12a-d06a-4cae-b424-c50627fcbe38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '09f4018b-f1cd-4726-b871-b110a7cf1b43', 'attached_at': '', 'detached_at': '', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'serial': 'e754f12a-d06a-4cae-b424-c50627fcbe38'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1200.254285] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a06763-192a-493e-bf88-532bfb942a7b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.272260] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0b72ed-8719-4a06-a6a4-b538ad5ff2d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.301858] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-e754f12a-d06a-4cae-b424-c50627fcbe38/volume-e754f12a-d06a-4cae-b424-c50627fcbe38.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1200.303710] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f1d05d7-6b75-4921-8cd2-ed978c629bf9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.316702] env[68569]: INFO nova.compute.manager [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Took 14.24 seconds to build instance. 
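[editor's note] The "_attach_volume_vmdk" entries above log the connection_info dict (driver_volume_type 'vmdk', a 'data' block with the shadow VM moref, volume_id and access_mode) before the ReconfigVM_Task is issued. The following is an assumption-laden sketch, not Nova's volumeops code: it only unpacks a dict shaped like the one logged above so the fields are easier to see.

def parse_vmdk_connection_info(connection_info):
    """Extract the pieces needed to attach a vmdk-backed Cinder volume."""
    if connection_info["driver_volume_type"] != "vmdk":
        raise ValueError("expected a vmdk connection")
    data = connection_info["data"]
    return {
        "backing_moref": data["volume"],            # e.g. 'vm-633737'
        "volume_id": data["volume_id"],
        "read_only": data.get("access_mode") == "ro",
    }

Fed the dict from the log, this would report backing_moref 'vm-633737' and read_only False, matching the subsequent "Reconfiguring VM instance ... to attach disk" entry.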
[ 1200.325810] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1200.325810] env[68569]: value = "task-3167873" [ 1200.325810] env[68569]: _type = "Task" [ 1200.325810] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.338634] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167873, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.443331] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167872, 'name': Destroy_Task, 'duration_secs': 0.979329} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.443622] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroyed the VM [ 1200.443855] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1200.444118] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ec86ed0d-25ec-4287-8fc7-7614607d872a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.451174] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1200.451174] env[68569]: value = "task-3167874" [ 1200.451174] env[68569]: _type = "Task" [ 1200.451174] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.462422] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167874, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.595176] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.595520] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.667744] env[68569]: DEBUG nova.objects.instance [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.692344] env[68569]: DEBUG nova.compute.manager [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Received event network-changed-518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1200.692344] env[68569]: DEBUG nova.compute.manager [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Refreshing instance network info cache due to event network-changed-518ea6b8-1dba-4b9b-aa1e-73186cca17de. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1200.692539] env[68569]: DEBUG oslo_concurrency.lockutils [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] Acquiring lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.692683] env[68569]: DEBUG oslo_concurrency.lockutils [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] Acquired lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.692849] env[68569]: DEBUG nova.network.neutron [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Refreshing network info cache for port 518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.822025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-76a282b8-9ed2-4c01-87a1-b87068067b99 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.751s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.822025] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.901s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.822025] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e216c4e-2480-49bb-b84e-5eda2186ae88 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.841495] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167873, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.962282] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167874, 'name': RemoveSnapshot_Task} progress is 26%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.100807] env[68569]: INFO nova.compute.claims [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.173962] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ab6e6dc6-00b0-4848-a7fc-7ec273cc8735 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.765s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.337738] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.518s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.338195] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167873, 'name': ReconfigVM_Task, 'duration_secs': 0.575954} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.340865] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-e754f12a-d06a-4cae-b424-c50627fcbe38/volume-e754f12a-d06a-4cae-b424-c50627fcbe38.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1201.345946] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-975d1d7f-42b5-468c-ab6e-0b72f31c491c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.364673] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1201.364673] env[68569]: value = "task-3167875" [ 1201.364673] env[68569]: _type = "Task" [ 1201.364673] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.375094] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167875, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.467128] env[68569]: DEBUG oslo_vmware.api [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167874, 'name': RemoveSnapshot_Task, 'duration_secs': 1.012543} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.467412] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1201.467724] env[68569]: INFO nova.compute.manager [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 15.74 seconds to snapshot the instance on the hypervisor. [ 1201.608117] env[68569]: INFO nova.compute.resource_tracker [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating resource usage from migration 837b3272-d5f4-4587-9c2f-100c4fc94aef [ 1201.806476] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87ade9b-7f6b-41a0-b5ca-57298032b26a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.819112] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a81bf2-d75c-474f-b33b-c70846113cd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.857282] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf240c2c-7716-41e1-8d50-904f225a10c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.868993] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb1f102-e2a0-4a8a-8a6c-7c222d1ca862 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.878491] env[68569]: DEBUG oslo_vmware.api [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167875, 'name': ReconfigVM_Task, 'duration_secs': 0.205836} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.886411] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633737', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'name': 'volume-e754f12a-d06a-4cae-b424-c50627fcbe38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '09f4018b-f1cd-4726-b871-b110a7cf1b43', 'attached_at': '', 'detached_at': '', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'serial': 'e754f12a-d06a-4cae-b424-c50627fcbe38'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1201.888234] env[68569]: DEBUG nova.compute.provider_tree [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1201.946745] env[68569]: DEBUG nova.network.neutron [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updated VIF entry in instance network info cache for port 518ea6b8-1dba-4b9b-aa1e-73186cca17de. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.947136] env[68569]: DEBUG nova.network.neutron [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [{"id": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "address": "fa:16:3e:85:26:c6", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap518ea6b8-1d", "ovs_interfaceid": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.027853] env[68569]: DEBUG nova.compute.manager [None req-e09e1281-3532-48bf-a1cf-ec6c5c2152c7 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Found 1 images (rotation: 2) {{(pid=68569) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1202.258934] env[68569]: DEBUG nova.compute.manager [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1202.315921] env[68569]: DEBUG nova.compute.manager [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1202.316848] env[68569]: DEBUG nova.compute.manager [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing instance network info cache due to event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1202.316848] env[68569]: DEBUG oslo_concurrency.lockutils [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.316848] env[68569]: DEBUG oslo_concurrency.lockutils [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.316848] env[68569]: DEBUG nova.network.neutron [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1202.412743] env[68569]: ERROR nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [req-7e6cbf23-36b9-48b4-9598-a6007f826c71] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7e6cbf23-36b9-48b4-9598-a6007f826c71"}]} [ 1202.428628] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1202.442938] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1202.442938] env[68569]: DEBUG nova.compute.provider_tree [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1202.450112] env[68569]: DEBUG oslo_concurrency.lockutils [req-ba3e287c-194e-4479-bb35-5cc7146cb965 req-78c7bc6a-1e59-4a33-8e92-04823518df91 service nova] Releasing lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.454627] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1202.476933] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1202.546291] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a416456c-af05-41cf-b79e-814339c7c094 
tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.546561] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.658748] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c626bd-696a-48bb-8726-a15525bd1a3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.667710] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063d392c-2994-41cf-8a7e-bc56874acdf1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.699470] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238354d4-7d94-42ed-81da-48deb74877d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.708102] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e195f27-f477-488b-b79d-9a327ee2a851 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.724975] env[68569]: DEBUG nova.compute.provider_tree [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1202.731630] env[68569]: DEBUG nova.compute.manager [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1202.731774] env[68569]: DEBUG nova.compute.manager [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing instance network info cache due to event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1202.732011] env[68569]: DEBUG oslo_concurrency.lockutils [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.732173] env[68569]: DEBUG oslo_concurrency.lockutils [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.732347] env[68569]: DEBUG nova.network.neutron [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1202.778142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.865432] env[68569]: DEBUG nova.compute.manager [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.866403] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60cf82c-2d40-4228-9467-0913e6be40cf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.924572] env[68569]: DEBUG nova.objects.instance [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'flavor' on Instance uuid 09f4018b-f1cd-4726-b871-b110a7cf1b43 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.017926] env[68569]: DEBUG nova.network.neutron [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updated VIF entry in instance network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1203.018369] env[68569]: DEBUG nova.network.neutron [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.050219] env[68569]: DEBUG nova.compute.utils [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1203.256814] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 174 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1203.257102] env[68569]: DEBUG nova.compute.provider_tree [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 174 to 175 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1203.257280] env[68569]: DEBUG nova.compute.provider_tree [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1203.382456] env[68569]: INFO nova.compute.manager [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] instance snapshotting [ 1203.383094] env[68569]: DEBUG nova.objects.instance [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'flavor' on Instance uuid fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.429869] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c4e7d4dd-eb04-496f-ae98-0a3398a9b2ff tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.790s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.475621] env[68569]: DEBUG nova.network.neutron [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updated VIF entry in instance network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1203.476059] env[68569]: DEBUG nova.network.neutron [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.520945] env[68569]: DEBUG oslo_concurrency.lockutils [req-decb4f07-3978-4e5a-b479-a699af28702a req-4467ebda-c75b-468f-ae04-aa1645ec9001 service nova] Releasing lock 
"refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.553057] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.763534] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.168s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.763712] env[68569]: INFO nova.compute.manager [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Migrating [ 1203.774874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.997s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.890057] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095c78f0-67a4-4358-8c83-f079922cc207 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.908853] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7f38b6-8ee5-4399-8618-565ff4d4921d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.979425] env[68569]: DEBUG oslo_concurrency.lockutils [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.980182] env[68569]: DEBUG nova.compute.manager [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1203.980182] env[68569]: DEBUG nova.compute.manager [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing instance network info cache due to event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1203.980182] env[68569]: DEBUG oslo_concurrency.lockutils [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.980428] env[68569]: DEBUG oslo_concurrency.lockutils [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.980530] env[68569]: DEBUG nova.network.neutron [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.026932] env[68569]: DEBUG oslo_concurrency.lockutils [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.027171] env[68569]: DEBUG oslo_concurrency.lockutils [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.278969] env[68569]: INFO nova.compute.claims [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.282407] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.282567] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquired lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.282731] env[68569]: DEBUG nova.network.neutron [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1204.342268] env[68569]: DEBUG nova.compute.manager [req-709e7c49-3524-4643-91d1-16e03adbee31 
req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1204.342392] env[68569]: DEBUG nova.compute.manager [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing instance network info cache due to event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1204.342581] env[68569]: DEBUG oslo_concurrency.lockutils [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.342750] env[68569]: DEBUG oslo_concurrency.lockutils [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.342874] env[68569]: DEBUG nova.network.neutron [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.420203] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1204.420551] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d7f928d0-85bb-47ee-9f8e-e0771e83c268 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.429782] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1204.429782] env[68569]: value = "task-3167876" [ 1204.429782] env[68569]: _type = "Task" [ 1204.429782] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.438026] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167876, 'name': CreateSnapshot_Task} progress is 0%. 
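The CreateSnapshot_Task records above follow the usual pattern for vCenter operations: submit the task, then poll it until it reports success or error, logging "progress is N%" along the way. A simplified stand-in for that wait loop (the `fetch_task_info` callable and `TaskFailed` exception are placeholders, not the oslo.vmware API):

```python
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it completes, like the log's _poll_task loop.

    fetch_task_info() must return a dict with 'state' in
    ('queued', 'running', 'success', 'error') and optional 'progress'/'result'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        state = info["state"]
        if state == "success":
            return info.get("result")
        if state == "error":
            raise TaskFailed(info.get("error", "task failed"))
        # Matches the "progress is N%" debug lines in the log.
        print(f"task progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")


# Example with a canned sequence of task states:
states = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 94},
               {"state": "success", "result": "snapshot-123"}])
print(wait_for_task(lambda: next(states), poll_interval=0))
```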
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.529811] env[68569]: INFO nova.compute.manager [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Detaching volume e754f12a-d06a-4cae-b424-c50627fcbe38 [ 1204.561838] env[68569]: INFO nova.virt.block_device [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Attempting to driver detach volume e754f12a-d06a-4cae-b424-c50627fcbe38 from mountpoint /dev/sdb [ 1204.562136] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1204.562354] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633737', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'name': 'volume-e754f12a-d06a-4cae-b424-c50627fcbe38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '09f4018b-f1cd-4726-b871-b110a7cf1b43', 'attached_at': '', 'detached_at': '', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'serial': 'e754f12a-d06a-4cae-b424-c50627fcbe38'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1204.563247] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a85b2be-101b-4b93-8574-ed81e78d50eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.589682] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a8be49-9ff8-4ffb-ac36-5648cafd240f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.597437] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066e1b65-6f58-4f17-9ad6-e54614faf8dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.618905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.619209] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.619422] env[68569]: INFO nova.compute.manager [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Attaching volume 7bfe5b96-5486-4bab-8423-8c0ad45bec92 to /dev/sdc [ 1204.624121] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcb8408-e397-4960-97cb-b7d2967fb30a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.642264] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] The volume has not been displaced from its original location: [datastore2] volume-e754f12a-d06a-4cae-b424-c50627fcbe38/volume-e754f12a-d06a-4cae-b424-c50627fcbe38.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1204.647686] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1204.647912] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99eeec77-3712-482a-86e8-fea88232a93e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.664727] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42304004-a9b5-49df-8c2f-aaf013ef1b23 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.675782] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c438370e-f1fe-495d-ba23-67d1dc1e1846 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.678397] env[68569]: DEBUG oslo_vmware.api [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1204.678397] env[68569]: value = "task-3167877" [ 1204.678397] env[68569]: _type = "Task" [ 1204.678397] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.688458] env[68569]: DEBUG oslo_vmware.api [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167877, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.692121] env[68569]: DEBUG nova.virt.block_device [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updating existing volume attachment record: 52617378-e0b6-4190-aa00-275fd8d1535d {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1204.780554] env[68569]: DEBUG nova.network.neutron [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updated VIF entry in instance network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.780953] env[68569]: DEBUG nova.network.neutron [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.786868] env[68569]: INFO nova.compute.resource_tracker [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating resource usage from migration bd7a4e8c-4879-407d-b0f1-217867c20528 [ 1204.950424] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167876, 'name': CreateSnapshot_Task, 'duration_secs': 0.500138} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.950424] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1204.951852] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15cb8cb-c792-4a86-bc7b-cfec94be6f3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.029336] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f10f0e6-0070-4bc0-9af7-1f8f8b954446 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.039776] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b5d1dd-faba-460e-aa06-38b0850cb65d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.075049] env[68569]: DEBUG nova.network.neutron [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [{"id": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "address": "fa:16:3e:85:26:c6", "network": {"id": "a8b2d31e-1fd9-4a0c-86ac-838a21b45392", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-109586267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f361997374e943cfa7a8e4e4884d6c65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap518ea6b8-1d", "ovs_interfaceid": "518ea6b8-1dba-4b9b-aa1e-73186cca17de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.076780] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb83861c-f7cd-4c91-a559-a19340afbd19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.088364] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddfda23-43a0-47d4-8c7b-7eb8497bb8bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.104457] env[68569]: DEBUG nova.compute.provider_tree 
[None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.113663] env[68569]: DEBUG nova.network.neutron [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updated VIF entry in instance network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.114017] env[68569]: DEBUG nova.network.neutron [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.189532] env[68569]: DEBUG oslo_vmware.api [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167877, 'name': ReconfigVM_Task, 'duration_secs': 0.289039} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.189815] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1205.194995] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55716704-65bc-4f98-be70-8c56411f2ceb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.212526] env[68569]: DEBUG oslo_vmware.api [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1205.212526] env[68569]: value = "task-3167879" [ 1205.212526] env[68569]: _type = "Task" [ 1205.212526] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.221709] env[68569]: DEBUG oslo_vmware.api [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.283678] env[68569]: DEBUG oslo_concurrency.lockutils [req-ea04ceaa-10a3-4ac0-a53e-35a02d8a9595 req-54bc4d07-415e-4cde-b351-6618a6af6ec4 service nova] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.473468] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.473744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.473967] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.474164] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 
tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.474333] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.482424] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1205.482925] env[68569]: INFO nova.compute.manager [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Terminating instance [ 1205.484220] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2c6157bc-6872-4191-8aeb-a9c76f5e77ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.494731] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1205.494731] env[68569]: value = "task-3167880" [ 1205.494731] env[68569]: _type = "Task" [ 1205.494731] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.524167] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167880, 'name': CloneVM_Task} progress is 0%. 
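The "Creating linked-clone VM from snapshot" / CloneVM_Task records above are the snapshot-upload path: the VM snapshot taken earlier is cloned into a new VM whose disks are child backings of the snapshot's disks, so no full copy is made. A hedged pyVmomi-style sketch of such a clone spec, assuming connected `vm`, `snapshot`, and `folder` managed objects (Nova's vmops issues the same CloneVM_Task through oslo.vmware):

```python
# Illustrative only: requires pyVmomi and a live vCenter session.
from pyVmomi import vim


def linked_clone_from_snapshot(vm, snapshot, folder, clone_name):
    """Clone a VM from an existing snapshot as a linked clone (child disk backing)."""
    relocate = vim.vm.RelocateSpec()
    # Sharing the snapshot's disks instead of copying them is what makes the clone "linked".
    relocate.diskMoveType = "createNewChildDiskBacking"

    spec = vim.vm.CloneSpec(location=relocate, snapshot=snapshot,
                            template=False, powerOn=False)
    return vm.CloneVM_Task(folder=folder, name=clone_name, spec=spec)
```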
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.583109] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Releasing lock "refresh_cache-7289fe12-4f1f-488f-9be2-a7cb666727b3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.607757] env[68569]: DEBUG nova.scheduler.client.report [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.616979] env[68569]: DEBUG oslo_concurrency.lockutils [req-709e7c49-3524-4643-91d1-16e03adbee31 req-e7645ac6-d0ad-4b66-9dc9-f0181d814f7a service nova] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.723788] env[68569]: DEBUG oslo_vmware.api [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167879, 'name': ReconfigVM_Task, 'duration_secs': 0.149496} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.724841] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633737', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'name': 'volume-e754f12a-d06a-4cae-b424-c50627fcbe38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '09f4018b-f1cd-4726-b871-b110a7cf1b43', 'attached_at': '', 'detached_at': '', 'volume_id': 'e754f12a-d06a-4cae-b424-c50627fcbe38', 'serial': 'e754f12a-d06a-4cae-b424-c50627fcbe38'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1205.990592] env[68569]: DEBUG nova.compute.manager [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1205.990592] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1205.991343] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc824dd9-29ae-4379-88b9-683fb170e32f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.001240] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.001782] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a2f6afb-0334-412e-afd9-32ce4c529638 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.006989] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167880, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.012670] env[68569]: DEBUG oslo_vmware.api [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 1206.012670] env[68569]: value = "task-3167881" [ 1206.012670] env[68569]: _type = "Task" [ 1206.012670] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.019798] env[68569]: DEBUG oslo_vmware.api [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167881, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.114105] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.339s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.114335] env[68569]: INFO nova.compute.manager [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Migrating [ 1206.270204] env[68569]: DEBUG nova.objects.instance [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'flavor' on Instance uuid 09f4018b-f1cd-4726-b871-b110a7cf1b43 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.506794] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167880, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.521827] env[68569]: DEBUG oslo_vmware.api [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167881, 'name': PowerOffVM_Task, 'duration_secs': 0.345436} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.521827] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1206.522061] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1206.522171] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27ecc5d4-5908-4d7e-9c2e-c977aa290344 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.593551] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1206.593787] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1206.594013] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Deleting the datastore file [datastore2] db75de86-9dda-42b2-9e7a-55e2ba5adad1 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1206.594284] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3d8269b-369c-4f92-9d2f-462b83bcd49d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.604073] env[68569]: DEBUG oslo_vmware.api [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for the task: (returnval){ [ 1206.604073] env[68569]: value = "task-3167883" [ 1206.604073] env[68569]: _type = "Task" [ 1206.604073] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.613818] env[68569]: DEBUG oslo_vmware.api [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167883, 'name': DeleteDatastoreFile_Task} progress is 0%. 
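The terminate path above runs a fixed teardown order: power the VM off, unregister it from vCenter, delete its directory from the datastore, and only then report the instance destroyed. A small stdlib sketch of that ordering with placeholder callables (the three step functions are stand-ins, not Nova's vmops/ds_util signatures):

```python
def destroy_instance(power_off, unregister, delete_datastore_dir, log=print):
    """Run the teardown steps in the same order the log shows.

    Each argument is a callable performing one vCenter/datastore operation; the
    datastore files are only removed after the VM is powered off and unregistered.
    """
    power_off()                     # PowerOffVM_Task
    log("Powered off the VM")
    unregister()                    # UnregisterVM
    log("Unregistered the VM")
    delete_datastore_dir()          # FileManager.DeleteDatastoreFile_Task
    log("Deleted contents of the VM from datastore")


# Example wiring with no-op steps:
destroy_instance(lambda: None, lambda: None, lambda: None)
```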
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.629227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.629382] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.629548] env[68569]: DEBUG nova.network.neutron [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.008673] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167880, 'name': CloneVM_Task, 'duration_secs': 1.324341} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.009032] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created linked-clone VM from snapshot [ 1207.009685] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27467785-05d8-4eca-bac5-c1488b37ebb2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.017282] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Uploading image f54a62d2-7369-49fc-a0f4-87115e866c51 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1207.038500] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1207.038500] env[68569]: value = "vm-633740" [ 1207.038500] env[68569]: _type = "VirtualMachine" [ 1207.038500] env[68569]: }. 
{{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1207.038789] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-35c4c17f-4181-482d-a730-5a9b7eb31060 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.047515] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease: (returnval){ [ 1207.047515] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520054fa-6e1c-e768-3d9f-ba7080e3d808" [ 1207.047515] env[68569]: _type = "HttpNfcLease" [ 1207.047515] env[68569]: } obtained for exporting VM: (result){ [ 1207.047515] env[68569]: value = "vm-633740" [ 1207.047515] env[68569]: _type = "VirtualMachine" [ 1207.047515] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1207.047801] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the lease: (returnval){ [ 1207.047801] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520054fa-6e1c-e768-3d9f-ba7080e3d808" [ 1207.047801] env[68569]: _type = "HttpNfcLease" [ 1207.047801] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1207.054631] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1207.054631] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520054fa-6e1c-e768-3d9f-ba7080e3d808" [ 1207.054631] env[68569]: _type = "HttpNfcLease" [ 1207.054631] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1207.114548] env[68569]: DEBUG oslo_vmware.api [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Task: {'id': task-3167883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13878} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.114789] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1207.114968] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1207.115159] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1207.115371] env[68569]: INFO nova.compute.manager [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1207.115714] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
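The "Waiting for function ... _deallocate_network_with_retries to return" record above is network cleanup wrapped in a bounded retry loop driven by an oslo.service looping call. A generic stdlib sketch in the same spirit (this is not the oslo.service looping-call machinery itself; the function and argument names are illustrative):

```python
import time


def call_with_retries(func, max_attempts=3, delay=2.0, retry_on=(Exception,), log=print):
    """Retry a flaky cleanup call a bounded number of times before giving up."""
    for attempt in range(1, max_attempts + 1):
        try:
            return func()
        except retry_on as exc:
            if attempt == max_attempts:
                raise
            log(f"attempt {attempt} failed ({exc}); retrying in {delay}s")
            time.sleep(delay)


# Example: a deallocation that fails once before succeeding.
calls = {"n": 0}

def flaky_deallocate():
    calls["n"] += 1
    if calls["n"] < 2:
        raise RuntimeError("neutron temporarily unavailable")
    return "deallocated"

print(call_with_retries(flaky_deallocate, delay=0))
```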
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1207.115984] env[68569]: DEBUG nova.compute.manager [-] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1207.116151] env[68569]: DEBUG nova.network.neutron [-] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1207.278347] env[68569]: DEBUG oslo_concurrency.lockutils [None req-daeb1f65-037f-4ac1-b594-234963ec9fc3 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.251s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.351521] env[68569]: DEBUG nova.network.neutron [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating instance_info_cache with network_info: [{"id": "5b205295-7e30-4590-b967-0d6e1f4d416a", "address": "fa:16:3e:05:b6:6a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b205295-7e", "ovs_interfaceid": "5b205295-7e30-4590-b967-0d6e1f4d416a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.557070] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1207.557070] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520054fa-6e1c-e768-3d9f-ba7080e3d808" [ 1207.557070] env[68569]: _type = "HttpNfcLease" [ 1207.557070] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1207.557389] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1207.557389] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520054fa-6e1c-e768-3d9f-ba7080e3d808" [ 1207.557389] env[68569]: _type = "HttpNfcLease" [ 1207.557389] env[68569]: }. 
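The HttpNfcLease records above are the export half of the stream-optimized image upload: ExportVm returns a lease, the lease is polled until it leaves "initializing" and becomes ready, and its info is then read to find the VMDK URL to stream from. A hedged pyVmomi-style sketch of that sequence, assuming a connected `vm` managed object (Nova's images/rw_handles code does the equivalent through oslo.vmware):

```python
# Illustrative only: requires pyVmomi and a live vCenter session.
import time
from pyVmomi import vim


def export_vmdk_url(vm, timeout=120):
    """Create an export lease for a VM and return the first disk's HTTPS URL."""
    lease = vm.ExportVm()                      # "Invoking VirtualMachine.ExportVm"
    deadline = time.monotonic() + timeout
    while lease.state == vim.HttpNfcLease.State.initializing:
        if time.monotonic() > deadline:
            raise TimeoutError("lease never became ready")
        time.sleep(1)                          # "Lease ... is initializing."
    if lease.state != vim.HttpNfcLease.State.ready:
        raise RuntimeError(f"lease in unexpected state: {lease.state}")

    # "Found VMDK URL ... from lease info": pick the first device URL that is a disk.
    device_url = next(d for d in lease.info.deviceUrl if d.disk)
    return device_url.url                      # stream the VMDK from this URL
```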
{{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1207.557994] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d97153-85ef-4018-86d2-2a15f27359e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.565980] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521b44be-8878-61d3-0aeb-1d2556bea601/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1207.566164] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521b44be-8878-61d3-0aeb-1d2556bea601/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1207.628767] env[68569]: DEBUG nova.compute.manager [req-17ab4841-82fc-438f-820a-95bf865b7855 req-8b0d2a34-3728-48d6-b713-8c83ce8bbae6 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Received event network-vif-deleted-b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1207.629016] env[68569]: INFO nova.compute.manager [req-17ab4841-82fc-438f-820a-95bf865b7855 req-8b0d2a34-3728-48d6-b713-8c83ce8bbae6 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Neutron deleted interface b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9; detaching it from the instance and deleting it from the info cache [ 1207.629202] env[68569]: DEBUG nova.network.neutron [req-17ab4841-82fc-438f-820a-95bf865b7855 req-8b0d2a34-3728-48d6-b713-8c83ce8bbae6 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.717565] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b27b020a-e5fb-4622-b741-5ba17ab153ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.852742] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.130022] env[68569]: ERROR nova.compute.manager [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1208.130022] env[68569]: ERROR nova.compute.manager [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Traceback (most recent call last): [ 1208.130022] env[68569]: ERROR nova.compute.manager 
[instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1208.130022] env[68569]: ERROR nova.compute.manager [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] yield [ 1208.130022] env[68569]: ERROR nova.compute.manager [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1208.130022] env[68569]: ERROR nova.compute.manager [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] disk_info = self.driver.migrate_disk_and_power_off( [ 1208.130022] env[68569]: ERROR nova.compute.manager [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1208.130022] env[68569]: ERROR nova.compute.manager [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] [ 1208.134115] env[68569]: DEBUG nova.network.neutron [-] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.135784] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79a4a214-8794-4a39-84d6-9e2e4fd62b84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.147776] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad98842-8c15-41b9-94ea-b4d89aa39e21 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.186700] env[68569]: DEBUG nova.compute.manager [req-17ab4841-82fc-438f-820a-95bf865b7855 req-8b0d2a34-3728-48d6-b713-8c83ce8bbae6 service nova] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Detach interface failed, port_id=b7baf7ab-5d37-4dc3-99cf-b91e0632e7a9, reason: Instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1208.410220] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.410482] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.410687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "09f4018b-f1cd-4726-b871-b110a7cf1b43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.411364] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.411364] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.413691] env[68569]: INFO nova.compute.manager [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Terminating instance [ 1208.636375] env[68569]: INFO nova.compute.manager [-] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Took 1.52 seconds to deallocate network for instance. 
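The traceback captured above shows nova.compute.manager._resize_instance calling the virt driver's migrate_disk_and_power_off() with ten positional arguments while the loaded VMwareVCDriver only accepts six to nine, so the resize aborts and the instance is set to ERROR. Below is a minimal, self-contained Python sketch of that failure mode under stated assumptions: FakeVMwareDriver, FakeComputeManager and their argument lists are illustrative placeholders, not the real Nova interfaces, and are only meant to reproduce the "takes from 6 to 9 positional arguments but 10 were given" shape of the error.

    # Illustrative sketch only -- not the actual Nova code. It reproduces the
    # failure mode in the traceback above: a manager calling a virt-driver
    # method with more positional arguments than the driver's signature takes.
    import inspect


    class FakeVMwareDriver:
        # Hypothetical signature with 6..9 positional parameters (incl. self),
        # mirroring the "takes from 6 to 9 positional arguments" message.
        def migrate_disk_and_power_off(self, context, instance, dest, flavor,
                                       network_info, block_device_info=None,
                                       timeout=0, retry_interval=0):
            return {"disk_info": []}


    class FakeComputeManager:
        def __init__(self, driver):
            self.driver = driver

        def resize_instance(self):
            # One positional argument too many (10 including self) raises
            # TypeError, which the real manager turns into vm_state=ERROR.
            return self.driver.migrate_disk_and_power_off(
                "ctxt", "instance", "dest", "flavor",
                "network_info", "block_device_info", 0, 0, "extra-arg")


    if __name__ == "__main__":
        mgr = FakeComputeManager(FakeVMwareDriver())
        try:
            mgr.resize_instance()
        except TypeError as exc:
            print("resize failed:", exc)

        # A defensive check a caller could make before invoking the driver:
        sig = inspect.signature(FakeVMwareDriver.migrate_disk_and_power_off)
        print("driver accepts parameters:", list(sig.parameters))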
[ 1208.656542] env[68569]: INFO nova.compute.manager [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration 837b3272-d5f4-4587-9c2f-100c4fc94aef for instance [ 1208.682148] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 175}}, 'project_id': 'f361997374e943cfa7a8e4e4884d6c65', 'user_id': '54ebbdfe9bfb4854a40b07d60c7a9efb', 'consumer_generation': 1} on consumer 7289fe12-4f1f-488f-9be2-a7cb666727b3 {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1208.918918] env[68569]: DEBUG nova.compute.manager [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1208.919838] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.921397] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dea010-2686-444b-8dea-e17228310d95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.929044] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1208.929390] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eda33bb2-c4fb-4f1e-a06c-45874f499a80 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.937059] env[68569]: DEBUG oslo_vmware.api [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1208.937059] env[68569]: value = "task-3167886" [ 1208.937059] env[68569]: _type = "Task" [ 1208.937059] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.945759] env[68569]: DEBUG oslo_vmware.api [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167886, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.146560] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.147055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.147223] env[68569]: DEBUG nova.objects.instance [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lazy-loading 'resources' on Instance uuid db75de86-9dda-42b2-9e7a-55e2ba5adad1 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.238017] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Volume attach. Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1209.238017] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633739', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'name': 'volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'serial': '7bfe5b96-5486-4bab-8423-8c0ad45bec92'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1209.239891] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21d157f-8d6c-436c-8771-1635091a8e62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.259883] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbaf6e8-d434-479e-8941-230466322d47 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.292102] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92/volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92.vmdk or 
device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1209.293013] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7e995d1-e4e5-49fa-b924-0330d5d8d82a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.315524] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1209.315524] env[68569]: value = "task-3167887" [ 1209.315524] env[68569]: _type = "Task" [ 1209.315524] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.326626] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167887, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.447432] env[68569]: DEBUG oslo_vmware.api [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167886, 'name': PowerOffVM_Task, 'duration_secs': 0.197582} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.447704] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1209.447898] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1209.448220] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7fee125-a13f-4e89-bf54-07dc8db38057 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.523601] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1209.523947] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1209.524180] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 
tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleting the datastore file [datastore2] 09f4018b-f1cd-4726-b871-b110a7cf1b43 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.524473] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fb5a190-4c4d-4f65-a030-7ec92e3ee88c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.533972] env[68569]: DEBUG oslo_vmware.api [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for the task: (returnval){ [ 1209.533972] env[68569]: value = "task-3167889" [ 1209.533972] env[68569]: _type = "Task" [ 1209.533972] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.544177] env[68569]: DEBUG oslo_vmware.api [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167889, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.829711] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167887, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.831511] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6a4352-fe35-44cf-8372-f1da1a171122 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.841020] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cdfa38-a764-4c11-8e77-a07ffb8fb2fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.883819] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3ad15c-4acd-4f08-a223-274d8993b7ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.894079] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320cbd8a-2539-4ca8-aba0-c765d992fa4d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.911123] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1209.922510] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.922786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.922976] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.923202] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.923374] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.925468] env[68569]: INFO nova.compute.manager [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Terminating instance [ 1210.046272] env[68569]: DEBUG oslo_vmware.api [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Task: {'id': task-3167889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171683} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.046567] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.046763] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.046956] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.047104] env[68569]: INFO nova.compute.manager [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1210.047361] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1210.047554] env[68569]: DEBUG nova.compute.manager [-] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1210.047648] env[68569]: DEBUG nova.network.neutron [-] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1210.240535] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.331588] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167887, 'name': ReconfigVM_Task, 'duration_secs': 0.532467} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.331989] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92/volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1210.337610] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-deb39e3a-18b1-4cab-9b37-20d48e7d4ba3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.356483] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1210.356483] env[68569]: value = "task-3167890" [ 1210.356483] env[68569]: _type = "Task" [ 1210.356483] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.366857] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167890, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.384104] env[68569]: ERROR nova.compute.manager [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Traceback (most recent call last): [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] yield [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] disk_info = self.driver.migrate_disk_and_power_off( [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1210.384104] env[68569]: ERROR nova.compute.manager [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] [ 1210.430482] env[68569]: DEBUG nova.compute.manager [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1210.430873] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1210.431420] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66b68f6e-22f0-41e8-8dbc-dad9e7920ac9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.436905] env[68569]: ERROR nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [req-c90e5479-8ea4-4323-91d5-21df22469ddc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c90e5479-8ea4-4323-91d5-21df22469ddc"}]} [ 1210.444040] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1210.444040] env[68569]: value = "task-3167891" [ 1210.444040] env[68569]: _type = "Task" [ 1210.444040] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.456097] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167891, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.457401] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1210.475445] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1210.475957] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1210.500323] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1210.530230] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1210.717473] env[68569]: DEBUG nova.compute.manager [req-6a8abfad-b957-44cb-a4e3-65aa4205f548 req-2f2d506c-65d9-4445-a418-8b7595700e5b service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Received event network-vif-deleted-66a22ce3-2444-47fc-aaf8-741e718c05f0 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1210.717473] env[68569]: INFO nova.compute.manager [req-6a8abfad-b957-44cb-a4e3-65aa4205f548 req-2f2d506c-65d9-4445-a418-8b7595700e5b service nova] [instance: 
09f4018b-f1cd-4726-b871-b110a7cf1b43] Neutron deleted interface 66a22ce3-2444-47fc-aaf8-741e718c05f0; detaching it from the instance and deleting it from the info cache [ 1210.717473] env[68569]: DEBUG nova.network.neutron [req-6a8abfad-b957-44cb-a4e3-65aa4205f548 req-2f2d506c-65d9-4445-a418-8b7595700e5b service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.753790] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f186a927-af40-4d5c-85fb-13b034eefec1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.764662] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2129b71-f4ac-4d56-a329-ca84dc3f6ccb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.797765] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7e7d28-ed7c-41f4-8061-5e43fff6a600 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.811620] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bacdce1-0d0b-4a09-bddf-69a318f287f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.829535] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1210.871191] env[68569]: DEBUG oslo_vmware.api [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167890, 'name': ReconfigVM_Task, 'duration_secs': 0.238696} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.871191] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633739', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'name': 'volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'serial': '7bfe5b96-5486-4bab-8423-8c0ad45bec92'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1210.905161] env[68569]: INFO nova.compute.manager [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration bd7a4e8c-4879-407d-b0f1-217867c20528 for instance [ 1210.934405] env[68569]: DEBUG nova.scheduler.client.report [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 176}}, 'project_id': 'aa0ab47201c64b0d87480d4ff90014f5', 'user_id': '73b1c309d1494888945f033a8c5140a5', 'consumer_generation': 1} on consumer 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1210.955782] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167891, 'name': PowerOffVM_Task, 'duration_secs': 0.309799} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.956132] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.956372] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1210.956599] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633728', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'name': 'volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7289fe12-4f1f-488f-9be2-a7cb666727b3', 'attached_at': '', 'detached_at': '', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'serial': '5f7eaca7-05fb-4187-bfc2-ecf09e846667'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1210.957444] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec292a3-bf21-4105-bcac-baa2256e7872 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.978772] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f9f1cf-cee9-41f9-9150-6e3e5f7ee514 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.986851] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8d483a-fa10-489c-b333-de46e60ddf9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.007620] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32e6b2a-8999-4d74-a044-0f3574f636ef {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.028664] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] The volume has not been displaced from its original location: [datastore2] volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667/volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1211.034224] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1211.035311] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13828891-ef63-4988-9cfa-0572683f45f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.054963] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1211.054963] env[68569]: value = "task-3167892" [ 1211.054963] env[68569]: _type = "Task" [ 1211.054963] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.066208] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.197345] env[68569]: DEBUG nova.network.neutron [-] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.222710] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78b305aa-5af6-40a4-95f1-e06662272b95 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.234369] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c27ea7-4e82-4b4b-81c2-a553cd16bc08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.272745] env[68569]: DEBUG nova.compute.manager [req-6a8abfad-b957-44cb-a4e3-65aa4205f548 req-2f2d506c-65d9-4445-a418-8b7595700e5b service nova] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Detach interface failed, port_id=66a22ce3-2444-47fc-aaf8-741e718c05f0, reason: Instance 09f4018b-f1cd-4726-b871-b110a7cf1b43 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1211.306054] env[68569]: DEBUG oslo_concurrency.lockutils [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.357022] env[68569]: ERROR nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] [req-d107d09b-6a40-4beb-ab90-68deea5477b4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d107d09b-6a40-4beb-ab90-68deea5477b4"}]} [ 1211.375236] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1211.403062] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1211.403062] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 128, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1211.421018] env[68569]: DEBUG nova.scheduler.client.report [None 
req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1211.453177] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1211.569382] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167892, 'name': ReconfigVM_Task, 'duration_secs': 0.233967} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.570407] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1211.574438] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea71fa7c-f825-48e7-ac9f-c1aa97900eaf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.597403] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1211.597403] env[68569]: value = "task-3167893" [ 1211.597403] env[68569]: _type = "Task" [ 1211.597403] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.608050] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167893, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.661951] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308e0813-ee74-408a-9963-266768012841 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.672132] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a4906e-926e-41aa-90f7-ff4057749408 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.709092] env[68569]: INFO nova.compute.manager [-] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Took 1.66 seconds to deallocate network for instance. 
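The scheduler report-client entries above show the optimistic-concurrency handling around Placement inventory writes: the PUT for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 is rejected with HTTP 409 (code placement.concurrent_update, a resource provider generation conflict), the client then refreshes inventories, aggregates and traits, and the retried write eventually lands, bumping the provider generation from 177 to 178. The sketch below shows only the shape of that retry loop; FakePlacement, GenerationConflict and set_inventory_with_retry are hypothetical stand-ins for illustration, not the real Placement API or Nova report client.

    # Illustrative sketch of the optimistic-concurrency loop visible above:
    # Placement rejects an inventory PUT whose provider generation is stale,
    # so the client re-reads the provider and retries with a fresh generation.
    class GenerationConflict(Exception):
        """Stands in for a 409 placement.concurrent_update response."""


    class FakePlacement:
        def __init__(self):
            self.generation = 177
            self.inventory = {}

        def get_provider_generation(self, provider_uuid):
            return self.generation

        def put_inventory(self, provider_uuid, inventory, generation):
            if generation != self.generation:
                raise GenerationConflict("resource provider generation conflict")
            self.inventory = inventory
            self.generation += 1          # e.g. 177 -> 178 as in the log
            return self.generation


    def set_inventory_with_retry(placement, provider_uuid, inventory,
                                 generation, max_attempts=4):
        for _ in range(max_attempts):
            try:
                return placement.put_inventory(provider_uuid, inventory,
                                               generation)
            except GenerationConflict:
                # Refresh the provider view, then retry with the new generation.
                generation = placement.get_provider_generation(provider_uuid)
        raise RuntimeError("inventory update kept conflicting; giving up")


    if __name__ == "__main__":
        rp = "a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6"
        placement = FakePlacement()
        stale_generation = 176            # another writer already bumped it
        inv = {"VCPU": {"total": 48}, "MEMORY_MB": {"total": 196590},
               "DISK_GB": {"total": 400}}
        new_gen = set_inventory_with_retry(placement, rp, inv, stale_generation)
        print("inventory stored at generation", new_gen)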
[ 1211.712456] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4dd91b-1505-4aa4-a9c1-90095ca172e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.726314] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168a60f4-13e5-414c-bfe4-80f0ae4e2cca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.741415] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1211.930669] env[68569]: DEBUG nova.objects.instance [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.108072] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167893, 'name': ReconfigVM_Task, 'duration_secs': 0.19582} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.108576] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633728', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'name': 'volume-5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7289fe12-4f1f-488f-9be2-a7cb666727b3', 'attached_at': '', 'detached_at': '', 'volume_id': '5f7eaca7-05fb-4187-bfc2-ecf09e846667', 'serial': '5f7eaca7-05fb-4187-bfc2-ecf09e846667'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1212.108895] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.109764] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c8838a-1d39-4090-8053-68010119b7b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.118409] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1212.118670] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ab1a88e-a125-43b0-aa54-8fe5776b45f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.187578] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1212.187946] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1212.188277] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore2] 7289fe12-4f1f-488f-9be2-a7cb666727b3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1212.188670] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5106335-b62a-4c7c-9d99-2069fe182182 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.200384] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1212.200384] env[68569]: value = "task-3167895" [ 1212.200384] env[68569]: _type = "Task" [ 1212.200384] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.212871] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167895, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.221483] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.276321] env[68569]: DEBUG nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updated inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with generation 177 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1212.276740] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 generation from 177 to 178 during operation: update_inventory {{(pid=68569) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1212.276876] env[68569]: DEBUG nova.compute.provider_tree [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1212.349704] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock 
"5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.440977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a416456c-af05-41cf-b79e-814339c7c094 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.820s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.440977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.091s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.521843] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.596278] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.596605] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.596771] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "3363dac8-c5df-405e-8bdc-9002e2d45e05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.596975] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.597190] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 
tempest-ServerActionsTestOtherA-130731114-project-member] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.599413] env[68569]: INFO nova.compute.manager [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Terminating instance [ 1212.711869] env[68569]: DEBUG oslo_vmware.api [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167895, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10399} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.712326] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1212.712522] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1212.712710] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1212.712894] env[68569]: INFO nova.compute.manager [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1212.713166] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1212.713358] env[68569]: DEBUG nova.compute.manager [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1212.713450] env[68569]: DEBUG nova.network.neutron [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1212.781477] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.634s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.783822] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.544s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.806647] env[68569]: INFO nova.scheduler.client.report [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Deleted allocations for instance db75de86-9dda-42b2-9e7a-55e2ba5adad1 [ 1212.943269] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edfb443-7441-4ac9-85e4-5bb2992723b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.946639] env[68569]: INFO nova.compute.manager [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Detaching volume 5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e [ 1212.957060] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90cedfc-bd12-4eed-89c6-2f266c0b8f13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.992861] env[68569]: INFO nova.virt.block_device [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Attempting to driver detach volume 5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e from mountpoint /dev/sdb [ 1212.993159] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1212.993369] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633736', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'name': 'volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'serial': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1212.995497] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ce452a-ea43-4075-ba7c-7038d2c5d09b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.998672] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f932cb-86a5-49be-a37c-010d7ed0a04b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.030717] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffe9b85-3e77-4a37-be6c-c7427f6c1cf3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.036423] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911fe0ff-6632-4eaa-84e3-97e0d4917ccd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.051556] env[68569]: DEBUG nova.compute.provider_tree [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.057080] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508f1810-e46f-44ba-9ead-c37753d6fa67 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.082246] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a82825-88ea-4241-834d-3d62eb85ec55 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.100900] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] The volume has not been displaced from its original location: [datastore2] volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e/volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1213.106013] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1213.106747] env[68569]: DEBUG nova.compute.manager [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1213.107102] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.107279] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b7f8692-e489-4c85-8d38-5ac8d76f8b41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.121068] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebf2de2-e213-486c-99c2-500b6cec0ea4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.131766] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.133131] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-148a862a-f83a-49b3-8b56-1525847f15ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.135384] env[68569]: DEBUG oslo_vmware.api [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1213.135384] env[68569]: value = "task-3167896" [ 1213.135384] env[68569]: _type = "Task" [ 1213.135384] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.140400] env[68569]: DEBUG oslo_vmware.api [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1213.140400] env[68569]: value = "task-3167897" [ 1213.140400] env[68569]: _type = "Task" [ 1213.140400] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.147521] env[68569]: DEBUG oslo_vmware.api [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167896, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.154960] env[68569]: DEBUG oslo_vmware.api [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.182660] env[68569]: DEBUG nova.compute.manager [req-3d9839cd-b1ae-4ca7-a734-c2399d07e074 req-60ac971e-5b76-4fef-a8d3-01958ad46136 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Received event network-vif-deleted-518ea6b8-1dba-4b9b-aa1e-73186cca17de {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1213.182870] env[68569]: INFO nova.compute.manager [req-3d9839cd-b1ae-4ca7-a734-c2399d07e074 req-60ac971e-5b76-4fef-a8d3-01958ad46136 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Neutron deleted interface 518ea6b8-1dba-4b9b-aa1e-73186cca17de; detaching it from the instance and deleting it from the info cache [ 1213.183284] env[68569]: DEBUG nova.network.neutron [req-3d9839cd-b1ae-4ca7-a734-c2399d07e074 req-60ac971e-5b76-4fef-a8d3-01958ad46136 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.314335] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7f985324-0b1e-4a58-8687-bb9ed674cd19 tempest-ServersV294TestFqdnHostnames-1673476702 tempest-ServersV294TestFqdnHostnames-1673476702-project-member] Lock "db75de86-9dda-42b2-9e7a-55e2ba5adad1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.840s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.505685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.505997] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.506255] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.506470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.506679] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.509321] env[68569]: INFO nova.compute.manager [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Terminating instance [ 1213.556163] env[68569]: DEBUG nova.scheduler.client.report [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.648334] env[68569]: DEBUG oslo_vmware.api [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167896, 'name': ReconfigVM_Task, 'duration_secs': 0.344777} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.649397] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1213.656682] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f38a078-3d28-4ffa-a1a2-ecd7c5c4e520 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.666950] env[68569]: DEBUG nova.network.neutron [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.668360] env[68569]: DEBUG oslo_vmware.api [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167897, 'name': PowerOffVM_Task, 'duration_secs': 0.253213} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.668717] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1213.668988] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1213.669600] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05fe32ba-745f-4597-adc1-7a2161dcd18c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.677537] env[68569]: DEBUG oslo_vmware.api [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1213.677537] env[68569]: value = "task-3167898" [ 1213.677537] env[68569]: _type = "Task" [ 1213.677537] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.690015] env[68569]: DEBUG oslo_vmware.api [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167898, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.690259] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa081516-1ffd-4a51-ac5e-8261b7031d6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.700154] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a7db5e-b8ff-4771-8de5-a14de25f1255 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.735219] env[68569]: DEBUG nova.compute.manager [req-3d9839cd-b1ae-4ca7-a734-c2399d07e074 req-60ac971e-5b76-4fef-a8d3-01958ad46136 service nova] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Detach interface failed, port_id=518ea6b8-1dba-4b9b-aa1e-73186cca17de, reason: Instance 7289fe12-4f1f-488f-9be2-a7cb666727b3 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1214.013714] env[68569]: DEBUG nova.compute.manager [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1214.013965] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1214.014885] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2fdbdc-9075-4e81-96f7-f380694f7dad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.024056] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1214.024342] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48e1860f-4749-4a9d-9016-a74e80d7a948 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.033102] env[68569]: DEBUG oslo_vmware.api [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1214.033102] env[68569]: value = "task-3167900" [ 1214.033102] env[68569]: _type = "Task" [ 1214.033102] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.042404] env[68569]: DEBUG oslo_vmware.api [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167900, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.060433] env[68569]: DEBUG oslo_concurrency.lockutils [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.277s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.060630] env[68569]: INFO nova.compute.manager [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Successfully reverted task state from resize_migrating on failure for instance. [ 1214.067369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.846s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.067598] env[68569]: DEBUG nova.objects.instance [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lazy-loading 'resources' on Instance uuid 09f4018b-f1cd-4726-b871-b110a7cf1b43 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server [None req-67e8e986-0b5f-4b46-86ac-cfddcfe730fa tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in 
force_reraise [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1214.069190] env[68569]: ERROR oslo_messaging.rpc.server [ 1214.170961] env[68569]: INFO nova.compute.manager [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 1.46 seconds to deallocate network for instance. [ 1214.191823] env[68569]: DEBUG oslo_vmware.api [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167898, 'name': ReconfigVM_Task, 'duration_secs': 0.193652} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.191823] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633736', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'name': 'volume-5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e', 'serial': '5a5b10ba-93a0-4da8-bdab-0b2e1a99aa0e'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1214.544429] env[68569]: DEBUG oslo_vmware.api [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167900, 'name': PowerOffVM_Task, 'duration_secs': 0.399922} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.544754] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.544803] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.545109] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b48bdcb3-d27e-41a8-b25e-8011513a11e3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.712347] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376fab26-e207-46b8-8488-359fa8a48d0a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.718572] env[68569]: INFO nova.compute.manager [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 0.55 seconds to detach 1 volumes for instance. [ 1214.722478] env[68569]: DEBUG nova.compute.manager [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Deleting volume: 5f7eaca7-05fb-4187-bfc2-ecf09e846667 {{(pid=68569) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1214.725126] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441a9376-f8fa-415d-896f-5da2f1e8ba76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.760612] env[68569]: DEBUG nova.objects.instance [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.762313] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f4a82c-29aa-4b61-a5b6-8618b3d23643 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.775724] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeaf760-9d96-4950-8d35-3ec7401ec9d1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.791813] env[68569]: DEBUG nova.compute.provider_tree [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.798216] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.798216] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.798216] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore1] 3363dac8-c5df-405e-8bdc-9002e2d45e05 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.798216] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a53c641-d618-418d-9ee4-437c8264a6c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.800300] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.800528] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.800702] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleting the datastore file [datastore1] 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.801319] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52613d23-33d0-4a34-b3c1-8ca7e95ec415 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.810439] env[68569]: DEBUG oslo_vmware.api [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1214.810439] env[68569]: value = "task-3167902" [ 1214.810439] env[68569]: _type = "Task" [ 1214.810439] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.812578] env[68569]: DEBUG oslo_vmware.api [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1214.812578] env[68569]: value = "task-3167903" [ 1214.812578] env[68569]: _type = "Task" [ 1214.812578] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.828998] env[68569]: DEBUG oslo_vmware.api [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.829305] env[68569]: DEBUG oslo_vmware.api [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.272557] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.298881] env[68569]: DEBUG nova.scheduler.client.report [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.329028] env[68569]: DEBUG oslo_vmware.api [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169323} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.329028] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.329278] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.329342] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.329507] env[68569]: INFO nova.compute.manager [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Took 2.22 seconds to destroy the instance on the hypervisor. [ 1215.329736] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1215.329945] env[68569]: DEBUG oslo_vmware.api [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180873} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.330188] env[68569]: DEBUG nova.compute.manager [-] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1215.330244] env[68569]: DEBUG nova.network.neutron [-] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1215.332181] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.332376] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.332551] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.332711] env[68569]: INFO nova.compute.manager [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1215.332925] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1215.333122] env[68569]: DEBUG nova.compute.manager [-] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1215.333214] env[68569]: DEBUG nova.network.neutron [-] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1215.573111] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521b44be-8878-61d3-0aeb-1d2556bea601/disk-0.vmdk. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1215.574174] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8d7d13-fbd9-4a83-ba5c-4af5279269b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.582171] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521b44be-8878-61d3-0aeb-1d2556bea601/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1215.582344] env[68569]: ERROR oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521b44be-8878-61d3-0aeb-1d2556bea601/disk-0.vmdk due to incomplete transfer. [ 1215.582636] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cbe0a72c-14d0-4485-9bd0-ff02592bf08c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.593040] env[68569]: DEBUG oslo_vmware.rw_handles [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521b44be-8878-61d3-0aeb-1d2556bea601/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1215.593375] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Uploaded image f54a62d2-7369-49fc-a0f4-87115e866c51 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1215.596651] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1215.596981] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a14a53e9-1e8f-4de8-9aaa-39a5644f9dec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.605348] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1215.605348] env[68569]: value = "task-3167905" [ 1215.605348] env[68569]: _type = "Task" [ 1215.605348] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.614862] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167905, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.652553] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.777492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a893a769-ea9f-439e-bf9e-9fab56e950c1 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.337s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.778533] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.126s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.803203] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.806770] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.285s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.832111] env[68569]: INFO nova.scheduler.client.report [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Deleted allocations for instance 09f4018b-f1cd-4726-b871-b110a7cf1b43 [ 1215.978464] env[68569]: DEBUG nova.compute.manager [req-c96f25aa-3372-44e1-bbc2-02a535f676d7 req-10412ef2-8718-4684-ba61-763074cda47a service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Received event network-vif-deleted-5b205295-7e30-4590-b967-0d6e1f4d416a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1215.978464] env[68569]: INFO nova.compute.manager [req-c96f25aa-3372-44e1-bbc2-02a535f676d7 req-10412ef2-8718-4684-ba61-763074cda47a service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Neutron deleted interface 
5b205295-7e30-4590-b967-0d6e1f4d416a; detaching it from the instance and deleting it from the info cache [ 1215.978464] env[68569]: DEBUG nova.network.neutron [req-c96f25aa-3372-44e1-bbc2-02a535f676d7 req-10412ef2-8718-4684-ba61-763074cda47a service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.982919] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca4e65c-6a3b-4a96-8320-74cf8d63d3f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.992979] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a57df0-2b4f-48f4-8020-b11faecbb8a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.031287] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6500f6-273e-4726-9789-2cd0526e49c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.043422] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0d267b-3a6b-4335-b327-f0a7a0bddc3d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.048700] env[68569]: DEBUG nova.compute.manager [req-c827243b-a9d5-43ef-b0ec-fedc49055882 req-26eee76b-a9ba-4322-b118-fc216d43382a service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Received event network-vif-deleted-2e2d14d6-2ae7-45de-a9ee-885c117167ee {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1216.049196] env[68569]: INFO nova.compute.manager [req-c827243b-a9d5-43ef-b0ec-fedc49055882 req-26eee76b-a9ba-4322-b118-fc216d43382a service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Neutron deleted interface 2e2d14d6-2ae7-45de-a9ee-885c117167ee; detaching it from the instance and deleting it from the info cache [ 1216.049196] env[68569]: DEBUG nova.network.neutron [req-c827243b-a9d5-43ef-b0ec-fedc49055882 req-26eee76b-a9ba-4322-b118-fc216d43382a service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.062658] env[68569]: DEBUG nova.compute.provider_tree [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.116335] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167905, 'name': Destroy_Task} progress is 33%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.285168] env[68569]: INFO nova.compute.manager [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Detaching volume 7bfe5b96-5486-4bab-8423-8c0ad45bec92 [ 1216.320686] env[68569]: INFO nova.virt.block_device [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Attempting to driver detach volume 7bfe5b96-5486-4bab-8423-8c0ad45bec92 from mountpoint /dev/sdc [ 1216.320955] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1216.321169] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633739', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'name': 'volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'serial': '7bfe5b96-5486-4bab-8423-8c0ad45bec92'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1216.322035] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b997ee-3047-468c-811f-33a7f8afd666 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.349573] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6abac9-8874-4d46-87b0-875a98b1146a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.352781] env[68569]: DEBUG oslo_concurrency.lockutils [None req-48cebdef-61ac-403b-acc7-74b39d7a4f40 tempest-AttachVolumeNegativeTest-1635086091 tempest-AttachVolumeNegativeTest-1635086091-project-member] Lock "09f4018b-f1cd-4726-b871-b110a7cf1b43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.942s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.359024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d3c8a4-c21e-4a5d-bbe9-0b386084ed84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.380279] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd9322b-2515-4630-a88f-f634692032de {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.396664] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] The volume has not been displaced from its original location: [datastore1] volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92/volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92.vmdk. No consolidation needed. {{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1216.402027] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfiguring VM instance instance-0000006d to detach disk 2002 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1216.402304] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-690a1044-ce7c-406f-9526-39c41150560b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.420493] env[68569]: DEBUG oslo_vmware.api [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1216.420493] env[68569]: value = "task-3167906" [ 1216.420493] env[68569]: _type = "Task" [ 1216.420493] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.428967] env[68569]: DEBUG oslo_vmware.api [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167906, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.448985] env[68569]: DEBUG nova.network.neutron [-] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.481321] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aee95ec8-3d2d-4304-84ad-75a9960ab046 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.492572] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3424f91b-269e-4d3e-9a68-06fcb9b4e8b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.530515] env[68569]: DEBUG nova.compute.manager [req-c96f25aa-3372-44e1-bbc2-02a535f676d7 req-10412ef2-8718-4684-ba61-763074cda47a service nova] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Detach interface failed, port_id=5b205295-7e30-4590-b967-0d6e1f4d416a, reason: Instance 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1216.531072] env[68569]: DEBUG nova.network.neutron [-] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.552855] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40f43b78-de23-47b2-9bc1-c985105deb8b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.563450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dce700f-3f17-441a-b3b7-073fe6dac32f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.574821] env[68569]: DEBUG nova.scheduler.client.report [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1216.600650] env[68569]: DEBUG nova.compute.manager [req-c827243b-a9d5-43ef-b0ec-fedc49055882 req-26eee76b-a9ba-4322-b118-fc216d43382a service nova] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Detach interface failed, port_id=2e2d14d6-2ae7-45de-a9ee-885c117167ee, reason: Instance 3363dac8-c5df-405e-8bdc-9002e2d45e05 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1216.615503] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167905, 'name': Destroy_Task, 'duration_secs': 0.741059} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.615752] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroyed the VM [ 1216.616051] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1216.616306] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-21d1141c-f3c6-4e29-88ee-d633548e96f8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.624250] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1216.624250] env[68569]: value = "task-3167907" [ 1216.624250] env[68569]: _type = "Task" [ 1216.624250] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.633346] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167907, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.932683] env[68569]: DEBUG oslo_vmware.api [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167906, 'name': ReconfigVM_Task, 'duration_secs': 0.288559} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.932683] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Reconfigured VM instance instance-0000006d to detach disk 2002 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1216.936234] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-badc0ee0-a71f-44ea-bf5a-c9366b8853c7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.951318] env[68569]: INFO nova.compute.manager [-] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Took 1.62 seconds to deallocate network for instance. [ 1216.954858] env[68569]: DEBUG oslo_vmware.api [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1216.954858] env[68569]: value = "task-3167908" [ 1216.954858] env[68569]: _type = "Task" [ 1216.954858] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.967653] env[68569]: DEBUG oslo_vmware.api [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167908, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.033772] env[68569]: INFO nova.compute.manager [-] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Took 1.70 seconds to deallocate network for instance. [ 1217.080800] env[68569]: DEBUG oslo_concurrency.lockutils [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.274s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.081033] env[68569]: INFO nova.compute.manager [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Successfully reverted task state from resize_migrating on failure for instance. [ 1217.088840] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.816s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.088840] env[68569]: DEBUG nova.objects.instance [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'resources' on Instance uuid 7289fe12-4f1f-488f-9be2-a7cb666727b3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server [None req-e81ee92b-ce4f-42df-91f0-4d4b9cb3e92f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, 
in wrapped [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1217.090307] env[68569]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1217.090307] env[68569]: ERROR oslo_messaging.rpc.server [ 1217.135304] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167907, 'name': RemoveSnapshot_Task} progress is 56%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.464964] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.471140] env[68569]: DEBUG oslo_vmware.api [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167908, 'name': ReconfigVM_Task, 'duration_secs': 0.16287} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.471434] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633739', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'name': 'volume-7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5de9a459-a2a2-4d78-9a66-cf819e8893b6', 'attached_at': '', 'detached_at': '', 'volume_id': '7bfe5b96-5486-4bab-8423-8c0ad45bec92', 'serial': '7bfe5b96-5486-4bab-8423-8c0ad45bec92'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1217.540869] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.592530] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.592824] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.637915] env[68569]: DEBUG oslo_vmware.api [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167907, 'name': RemoveSnapshot_Task, 'duration_secs': 0.601894} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.640364] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1217.640597] env[68569]: INFO nova.compute.manager [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 13.75 seconds to snapshot the instance on the hypervisor. 
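The oslo_messaging.rpc.server traceback above ends in an arity mismatch: nova.compute.manager passes one more positional argument to driver.migrate_disk_and_power_off() than the loaded VMwareVCDriver accepts ("takes from 6 to 9 positional arguments but 10 were given"), which is why the task state is reverted from resize_migrating for that instance. A minimal sketch that reproduces the same shape of error; the class and parameter names below are illustrative only, not Nova's actual driver signature:

    # Bound method: 'self' counts as one positional, so "6 to 9 positional
    # arguments" means 5 to 8 caller-supplied arguments.
    class FakeDriver:
        def migrate_disk_and_power_off(self, context, instance, dest,
                                       flavor, network_info,
                                       block_device_info=None,
                                       timeout=0, retry_interval=0):
            return {}

    driver = FakeDriver()
    try:
        # One extra positional argument, as a newer compute manager might
        # pass to an older driver interface.
        driver.migrate_disk_and_power_off(
            "ctxt", "instance", "dest-host", "flavor", "nw_info",
            None, 0, 0, "unexpected-extra")
    except TypeError as exc:
        # -> migrate_disk_and_power_off() takes from 6 to 9 positional
        #    arguments but 10 were given
        print(exc)
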
[ 1217.732231] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90212e03-cd1d-4650-bb00-02847755775c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.740232] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7fc679-d4c6-47fa-b609-ac305199d068 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.770783] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d94a2b-a4c7-4d33-a514-18d8c2f3f670 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.778547] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac34c1cd-9f63-4c46-b101-1ccd81900c3b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.791614] env[68569]: DEBUG nova.compute.provider_tree [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.015207] env[68569]: DEBUG nova.objects.instance [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'flavor' on Instance uuid 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.097053] env[68569]: DEBUG nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1218.183912] env[68569]: DEBUG nova.compute.manager [None req-79757b12-7b66-44da-9ccf-73c8d77cb56a tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Found 2 images (rotation: 2) {{(pid=68569) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1218.294806] env[68569]: DEBUG nova.scheduler.client.report [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1218.617648] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.799875] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.802276] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.337s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.802550] env[68569]: DEBUG nova.objects.instance [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'resources' on Instance uuid 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.818493] env[68569]: INFO nova.scheduler.client.report [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted allocations for instance 7289fe12-4f1f-488f-9be2-a7cb666727b3 [ 1218.931065] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-6317f756-c9ed-4858-bb2a-c20d9f82f90d-9c043357-2161-4ad7-b828-4cc9886cbf9c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.931065] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6317f756-c9ed-4858-bb2a-c20d9f82f90d-9c043357-2161-4ad7-b828-4cc9886cbf9c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.931065] env[68569]: DEBUG nova.objects.instance [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'flavor' on Instance uuid 6317f756-c9ed-4858-bb2a-c20d9f82f90d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.024300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7d27209c-a80e-4c05-af1f-530a375bf1b5 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.246s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.103511] env[68569]: DEBUG nova.compute.manager [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1219.105040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b6607a-99e2-4b1e-990e-fea8ec27a930 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.326451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5060c7e8-e40d-4216-a519-0a29cd60d626 tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.404s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.327388] env[68569]: DEBUG oslo_concurrency.lockutils [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 8.022s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.327610] env[68569]: DEBUG oslo_concurrency.lockutils [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.327803] env[68569]: DEBUG oslo_concurrency.lockutils [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e 
tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.328039] env[68569]: DEBUG oslo_concurrency.lockutils [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.329615] env[68569]: INFO nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Terminating instance [ 1219.416658] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb72b58-c034-46c8-aa66-979c369456c0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.424291] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85b1237-eb57-4913-93f3-34fb93127ba5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.458451] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8f9f40-a2da-490d-b607-88ec315bbd5c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.465836] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a7247b-3a31-400b-a05b-d9aa59a92904 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.480161] env[68569]: DEBUG nova.compute.provider_tree [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.561090] env[68569]: DEBUG nova.objects.instance [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'pci_requests' on Instance uuid 6317f756-c9ed-4858-bb2a-c20d9f82f90d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.616324] env[68569]: INFO nova.compute.manager [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] instance snapshotting [ 1219.617191] env[68569]: DEBUG nova.objects.instance [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'flavor' on Instance uuid fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.832942] env[68569]: DEBUG nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1219.833346] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-443fdce4-3180-4e3e-8580-b07f3def4e97 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.843511] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be0014e-b204-4024-8d36-2a2ab364367a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.876527] env[68569]: WARNING nova.virt.vmwareapi.driver [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 7289fe12-4f1f-488f-9be2-a7cb666727b3 could not be found. [ 1219.876759] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1219.876982] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d6bb1ae-a3dc-482f-8fd9-8b77f6bc9812 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.885283] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32ac800-70e3-4cff-a8b8-4b5eae0ab270 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.914799] env[68569]: WARNING nova.virt.vmwareapi.vmops [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7289fe12-4f1f-488f-9be2-a7cb666727b3 could not be found. [ 1219.915023] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1219.915199] env[68569]: INFO nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 0.08 seconds to destroy the instance on the hypervisor. 
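The recurring "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited Ns" and "Lock ... \"released\" by ... :: held Ns" DEBUG lines come from the oslo.concurrency lockutils wrapper (lockutils.py inner) around serialized sections such as ResourceTracker.update_usage and the per-instance terminate/detach paths. A minimal sketch of that pattern, assuming an in-process lock and a made-up function name; the lock name only mirrors the ones in the log:

    from oslo_concurrency import lockutils

    # Serializes callers on a lock named "compute_resources"; the wrapper
    # logs how long each caller waited for the lock and how long it was
    # held, producing lines like the ones above.
    @lockutils.synchronized("compute_resources")
    def update_usage_example():
        pass

    update_usage_example()
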
[ 1219.915457] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1219.915743] env[68569]: DEBUG nova.compute.manager [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1219.915838] env[68569]: DEBUG nova.network.neutron [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1219.983669] env[68569]: DEBUG nova.scheduler.client.report [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1220.060238] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.060612] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.060929] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.061169] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.061376] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.063261] env[68569]: DEBUG nova.objects.base [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Object Instance<6317f756-c9ed-4858-bb2a-c20d9f82f90d> lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1220.063454] env[68569]: DEBUG nova.network.neutron [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1220.065775] env[68569]: INFO nova.compute.manager [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Terminating instance [ 1220.123312] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4f9425-7143-4d38-bde8-952c91040d46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.130063] env[68569]: DEBUG nova.policy [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1220.149063] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59a5884-0d15-42d6-8ade-8f7231371fb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.434673] env[68569]: DEBUG nova.network.neutron [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.489224] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.492018] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 
tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.951s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.492273] env[68569]: DEBUG nova.objects.instance [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'resources' on Instance uuid 3363dac8-c5df-405e-8bdc-9002e2d45e05 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.512712] env[68569]: INFO nova.scheduler.client.report [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted allocations for instance 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d [ 1220.570012] env[68569]: DEBUG nova.compute.manager [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1220.570325] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1220.571231] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24328176-9b1e-44f8-8633-83fad57367cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.580113] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.580376] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e00705e-7e2c-44f7-a909-49b2b01b703d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.586625] env[68569]: DEBUG oslo_vmware.api [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1220.586625] env[68569]: value = "task-3167911" [ 1220.586625] env[68569]: _type = "Task" [ 1220.586625] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.595287] env[68569]: DEBUG oslo_vmware.api [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.660444] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1220.660699] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8227a83b-d89a-4188-bb77-b2de4ce2ac15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.670741] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1220.670741] env[68569]: value = "task-3167912" [ 1220.670741] env[68569]: _type = "Task" [ 1220.670741] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.678768] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167912, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.937693] env[68569]: INFO nova.compute.manager [-] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 1.02 seconds to deallocate network for instance. [ 1220.952261] env[68569]: WARNING nova.volume.cinder [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Attachment 9c4b3829-2286-4db5-ae8e-655ad98d25c6 does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = 9c4b3829-2286-4db5-ae8e-655ad98d25c6. (HTTP 404) (Request-ID: req-9a2cbb27-e8a8-4ff2-9c6e-f728ba815434) [ 1220.952539] env[68569]: INFO nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Took 0.01 seconds to detach 1 volumes for instance. [ 1220.954716] env[68569]: DEBUG nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Deleting volume: 5f7eaca7-05fb-4187-bfc2-ecf09e846667 {{(pid=68569) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1220.969667] env[68569]: WARNING nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Failed to delete volume: 5f7eaca7-05fb-4187-bfc2-ecf09e846667 due to Volume 5f7eaca7-05fb-4187-bfc2-ecf09e846667 could not be found.: nova.exception.VolumeNotFound: Volume 5f7eaca7-05fb-4187-bfc2-ecf09e846667 could not be found. 
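[The "Waiting for the task: ... to complete" and "Task: {...} progress is N%" records above are produced by oslo.vmware's task-polling helpers (wait_for_task at oslo_vmware/api.py:397 and _poll_task at api.py:434/444, as cited in the records themselves). A minimal sketch of that pattern follows, assuming placeholder vCenter host/credentials and calling invoke_api()/wait_for_task() directly rather than through Nova's vm_util/vmops wrappers; it is an illustration of the library pattern, not code from this deployment.]

    # Minimal sketch of the oslo.vmware task-polling pattern behind the records above.
    # The host and credentials are placeholders, and constructing the session opens a
    # real connection to the given vCenter.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # placeholder host
        'administrator@vsphere.local',    # placeholder username
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)

    # invoke_api() issues the SOAP call (e.g. PowerOffVM_Task) and returns a task
    # reference; wait_for_task() then polls the TaskInfo, logging progress, until it
    # reports 'success' or raises if the task ends in 'error'.
    vm_ref = vim_util.get_moref('vm-633742', 'VirtualMachine')  # moref format as seen in this log
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task_ref)
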
[ 1221.021320] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5ff3e14d-9714-4840-9ed7-02cb542d5486 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "47fa6c6a-a31f-4eea-86b0-807dba6a6b4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.515s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.098420] env[68569]: DEBUG oslo_vmware.api [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167911, 'name': PowerOffVM_Task, 'duration_secs': 0.255957} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.098677] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.098856] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1221.099143] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d807051b-1ae9-45cb-b7e2-8000805e9c63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.114950] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e59605-ff25-461a-ac41-71f68a2d60f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.122316] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6dc96b2-f4cf-4269-9bc3-59ff8542bdd0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.153993] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445d67ed-d672-4626-85b6-d2e05c703f13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.161507] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19d8da4-a2f1-4f57-840d-32517a30e574 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.167967] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1221.168196] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Deleting contents of 
the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1221.168374] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Deleting the datastore file [datastore2] 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1221.168596] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48220b74-a39c-492c-9c05-e5a35cab5944 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.177924] env[68569]: DEBUG nova.compute.provider_tree [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.183770] env[68569]: DEBUG oslo_vmware.api [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for the task: (returnval){ [ 1221.183770] env[68569]: value = "task-3167914" [ 1221.183770] env[68569]: _type = "Task" [ 1221.183770] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.190052] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167912, 'name': CreateSnapshot_Task, 'duration_secs': 0.460493} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.190611] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1221.192160] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231d08cd-c0a1-4c11-937a-debc071910ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.196468] env[68569]: DEBUG oslo_vmware.api [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167914, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.476476] env[68569]: INFO nova.compute.manager [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance disappeared during terminate [ 1221.476734] env[68569]: DEBUG oslo_concurrency.lockutils [None req-36b3933a-d02a-4a67-bb8e-309c7f11d85e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "7289fe12-4f1f-488f-9be2-a7cb666727b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.149s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.610862] env[68569]: DEBUG nova.network.neutron [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Successfully updated port: 9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.684607] env[68569]: DEBUG nova.scheduler.client.report [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1221.690184] env[68569]: DEBUG nova.compute.manager [req-fa22e337-198b-4829-9787-142d68c8df8d req-4b65a22a-8206-4abb-87a3-aa488dbdbc32 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-vif-plugged-9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1221.690414] env[68569]: DEBUG oslo_concurrency.lockutils [req-fa22e337-198b-4829-9787-142d68c8df8d req-4b65a22a-8206-4abb-87a3-aa488dbdbc32 service nova] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.691039] env[68569]: DEBUG oslo_concurrency.lockutils [req-fa22e337-198b-4829-9787-142d68c8df8d req-4b65a22a-8206-4abb-87a3-aa488dbdbc32 service nova] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.691039] env[68569]: DEBUG oslo_concurrency.lockutils [req-fa22e337-198b-4829-9787-142d68c8df8d req-4b65a22a-8206-4abb-87a3-aa488dbdbc32 service nova] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.691039] env[68569]: DEBUG nova.compute.manager [req-fa22e337-198b-4829-9787-142d68c8df8d req-4b65a22a-8206-4abb-87a3-aa488dbdbc32 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] No waiting events found dispatching network-vif-plugged-9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1221.691285] env[68569]: WARNING nova.compute.manager [req-fa22e337-198b-4829-9787-142d68c8df8d req-4b65a22a-8206-4abb-87a3-aa488dbdbc32 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received unexpected event network-vif-plugged-9c043357-2161-4ad7-b828-4cc9886cbf9c for instance with vm_state active and task_state None. [ 1221.700797] env[68569]: DEBUG oslo_vmware.api [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Task: {'id': task-3167914, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135864} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.701542] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.701721] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.701891] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1221.702068] env[68569]: INFO nova.compute.manager [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1221.702297] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1221.702484] env[68569]: DEBUG nova.compute.manager [-] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1221.702559] env[68569]: DEBUG nova.network.neutron [-] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1221.712306] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1221.712795] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-92599b06-c380-485b-86eb-ed4d653c6bb6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.720935] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1221.720935] env[68569]: value = "task-3167915" [ 1221.720935] env[68569]: _type = "Task" [ 1221.720935] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.728805] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167915, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.112922] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.113185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.113329] env[68569]: DEBUG nova.network.neutron [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1222.193304] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.195870] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.579s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.197475] env[68569]: INFO nova.compute.claims [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1222.212294] env[68569]: INFO nova.scheduler.client.report [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted allocations for instance 3363dac8-c5df-405e-8bdc-9002e2d45e05 [ 1222.230790] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167915, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.295172] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.295416] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.621716] env[68569]: DEBUG nova.network.neutron [-] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.651199] env[68569]: WARNING nova.network.neutron [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] 01780a40-9441-415d-988a-24afe70ba382 already exists in list: networks containing: ['01780a40-9441-415d-988a-24afe70ba382']. ignoring it [ 1222.719707] env[68569]: DEBUG oslo_concurrency.lockutils [None req-4c0f064a-deb5-4f0a-9750-42207de6c89e tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "3363dac8-c5df-405e-8bdc-9002e2d45e05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.123s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.735160] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167915, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.798317] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1222.935442] env[68569]: DEBUG nova.network.neutron [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "address": "fa:16:3e:63:fe:1b", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c043357-21", "ovs_interfaceid": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.124865] env[68569]: INFO nova.compute.manager [-] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Took 1.42 seconds to deallocate network for instance. [ 1223.233785] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167915, 'name': CloneVM_Task, 'duration_secs': 1.405778} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.234056] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Created linked-clone VM from snapshot [ 1223.234746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52cb991-0605-4a46-9df7-aed936383796 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.242773] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Uploading image 9d214233-2275-4569-9f11-1ab35d8c5299 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1223.267073] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1223.267073] env[68569]: value = "vm-633742" [ 1223.267073] env[68569]: _type = "VirtualMachine" [ 1223.267073] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1223.267365] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3f09e261-7237-4ace-8e62-62d7aebaea69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.277167] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease: (returnval){ [ 1223.277167] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527235cc-a3ca-abec-8618-2a6988392e76" [ 1223.277167] env[68569]: _type = "HttpNfcLease" [ 1223.277167] env[68569]: } obtained for exporting VM: (result){ [ 1223.277167] env[68569]: value = "vm-633742" [ 1223.277167] env[68569]: _type = "VirtualMachine" [ 1223.277167] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1223.277167] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the lease: (returnval){ [ 1223.277167] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527235cc-a3ca-abec-8618-2a6988392e76" [ 1223.277167] env[68569]: _type = "HttpNfcLease" [ 1223.277167] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1223.284296] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1223.284296] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527235cc-a3ca-abec-8618-2a6988392e76" [ 1223.284296] env[68569]: _type = "HttpNfcLease" [ 1223.284296] env[68569]: } is initializing. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1223.317736] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.326566] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d9c933-86e8-4a43-a4c0-524247bed273 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.333916] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e65727-9672-4de8-a108-14b0b807a101 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.363059] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1ed57e-cd2e-461e-a1a8-792f90846bd9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.369856] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69333380-8622-4c20-bb38-90f3381725b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.383609] env[68569]: DEBUG nova.compute.provider_tree [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.438298] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.438903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.439083] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.439826] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd82b26-2ea5-494e-a457-a97630309068 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.455707] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 
tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1223.456030] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1223.456222] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1223.456408] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1223.456552] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1223.456696] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1223.456895] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1223.457064] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1223.457231] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1223.457390] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 
tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1223.457557] env[68569]: DEBUG nova.virt.hardware [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1223.463656] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Reconfiguring VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1223.464197] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a82e400d-c3e5-47bd-835d-94c6120d894c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.480299] env[68569]: DEBUG oslo_vmware.api [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1223.480299] env[68569]: value = "task-3167918" [ 1223.480299] env[68569]: _type = "Task" [ 1223.480299] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.487619] env[68569]: DEBUG oslo_vmware.api [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167918, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.631289] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.718202] env[68569]: DEBUG nova.compute.manager [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-changed-9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1223.718450] env[68569]: DEBUG nova.compute.manager [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing instance network info cache due to event network-changed-9c043357-2161-4ad7-b828-4cc9886cbf9c. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1223.718721] env[68569]: DEBUG oslo_concurrency.lockutils [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.718925] env[68569]: DEBUG oslo_concurrency.lockutils [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.719150] env[68569]: DEBUG nova.network.neutron [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing network info cache for port 9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1223.784677] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1223.784677] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527235cc-a3ca-abec-8618-2a6988392e76" [ 1223.784677] env[68569]: _type = "HttpNfcLease" [ 1223.784677] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1223.785068] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1223.785068] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527235cc-a3ca-abec-8618-2a6988392e76" [ 1223.785068] env[68569]: _type = "HttpNfcLease" [ 1223.785068] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1223.786095] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398667ef-d5d3-4142-8b3b-2568bc62b01c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.792952] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52748f2a-0663-f2de-94cc-e76c92c24d56/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1223.793167] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52748f2a-0663-f2de-94cc-e76c92c24d56/disk-0.vmdk for reading. 
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1223.883164] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63413e73-2b69-4787-9f21-e12aed7a9509 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.886156] env[68569]: DEBUG nova.scheduler.client.report [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1223.989974] env[68569]: DEBUG oslo_vmware.api [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167918, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.392343] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.196s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.392992] env[68569]: DEBUG nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1224.395686] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.078s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.397210] env[68569]: INFO nova.compute.claims [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1224.435633] env[68569]: DEBUG nova.network.neutron [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updated VIF entry in instance network info cache for port 9c043357-2161-4ad7-b828-4cc9886cbf9c. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1224.436658] env[68569]: DEBUG nova.network.neutron [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "address": "fa:16:3e:63:fe:1b", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c043357-21", "ovs_interfaceid": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.493746] env[68569]: DEBUG oslo_vmware.api [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167918, 'name': ReconfigVM_Task, 'duration_secs': 0.567676} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.494439] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.494767] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Reconfigured VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1224.905042] env[68569]: DEBUG nova.compute.utils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1224.909317] env[68569]: DEBUG nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1224.939264] env[68569]: DEBUG oslo_concurrency.lockutils [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.940020] env[68569]: DEBUG nova.compute.manager [req-f07eb729-f6cc-49ff-8eb6-8b30ecf54fab req-e0b89398-9710-475e-964d-8c7ab3096cea service nova] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Received event network-vif-deleted-a6dc900f-8486-4e42-9982-a87df2d989b9 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1224.999612] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c8caf8ce-5b3b-42fd-bcbb-56b9371e5d37 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6317f756-c9ed-4858-bb2a-c20d9f82f90d-9c043357-2161-4ad7-b828-4cc9886cbf9c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.069s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.410260] env[68569]: DEBUG nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1225.531347] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fd2f80-6a92-4561-9f5c-97ed1ce7882c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.539532] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac266b0-b040-441e-8f4a-6ecdda4fb48f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.571949] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4bfed5-c2c6-4b96-a5ed-504a1ea25366 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.579583] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872d56a6-cb67-4ba5-93a9-fb7078e73d03 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.593061] env[68569]: DEBUG nova.compute.provider_tree [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.931121] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "61aa0997-ffa6-4551-bdaa-132026e240f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.931410] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.931616] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "61aa0997-ffa6-4551-bdaa-132026e240f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.931795] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.931959] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 
tempest-ServerActionsTestOtherA-130731114-project-member] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.934124] env[68569]: INFO nova.compute.manager [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Terminating instance [ 1226.096970] env[68569]: DEBUG nova.scheduler.client.report [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.425417] env[68569]: DEBUG nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1226.438696] env[68569]: DEBUG nova.compute.manager [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1226.439032] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.440515] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc8fdb8-504e-4327-bf5b-4e27ac1276b8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.451226] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.453668] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1226.453930] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1226.454143] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1226.454397] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1226.454583] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1226.454791] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 
tempest-ServersListShow298Test-1566312384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1226.455050] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1226.455256] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1226.455496] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1226.455693] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1226.455936] env[68569]: DEBUG nova.virt.hardware [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1226.456309] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c8b7236-2baf-4dfb-b837-618110eb868b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.458573] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaa04df-69ff-4c76-b260-98d916e51eb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.467197] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1dca3d-1f6e-4909-97f7-055142913b84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.471077] env[68569]: DEBUG oslo_vmware.api [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1226.471077] env[68569]: value = "task-3167920" [ 1226.471077] env[68569]: _type = "Task" [ 1226.471077] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.482467] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1226.488237] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Creating folder: Project (a219aacbe312437f809fae4ea9cb90d3). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1226.489052] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0e59110-6bb7-41b4-8949-2ea412a8d703 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.493730] env[68569]: DEBUG oslo_vmware.api [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.502382] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Created folder: Project (a219aacbe312437f809fae4ea9cb90d3) in parent group-v633430. [ 1226.502595] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Creating folder: Instances. Parent ref: group-v633743. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1226.502839] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ec84d35-45db-4416-abdb-8d748677d22f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.512454] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Created folder: Instances in parent group-v633743. [ 1226.512716] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1226.512906] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1226.513154] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb427414-216b-4f3b-b859-c498f4f5b1fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.529847] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1226.529847] env[68569]: value = "task-3167923" [ 1226.529847] env[68569]: _type = "Task" [ 1226.529847] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.538829] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167923, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.602314] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.602942] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1226.606465] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.975s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.606823] env[68569]: DEBUG nova.objects.instance [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lazy-loading 'resources' on Instance uuid 5de9a459-a2a2-4d78-9a66-cf819e8893b6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.650922] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-6317f756-c9ed-4858-bb2a-c20d9f82f90d-9c043357-2161-4ad7-b828-4cc9886cbf9c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.651192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6317f756-c9ed-4858-bb2a-c20d9f82f90d-9c043357-2161-4ad7-b828-4cc9886cbf9c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.980583] env[68569]: DEBUG oslo_vmware.api [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167920, 'name': PowerOffVM_Task, 'duration_secs': 0.229425} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.980853] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1226.981167] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1226.981526] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a924562b-6b9e-4799-8ca0-a5e7d9b4e4f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.042145] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167923, 'name': CreateVM_Task, 'duration_secs': 0.357295} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.042342] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1227.042761] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.042913] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.043507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1227.043855] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e699b117-4b62-4bd8-acbf-63247b1220ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.047739] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1227.047951] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1227.048847] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleting the datastore file [datastore1] 61aa0997-ffa6-4551-bdaa-132026e240f9 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.049566] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1db2ff57-ef33-440e-901b-303397517574 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.052629] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1227.052629] env[68569]: value = 
"session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b78bb7-b951-dd6c-91cd-998b15b22279" [ 1227.052629] env[68569]: _type = "Task" [ 1227.052629] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.057492] env[68569]: DEBUG oslo_vmware.api [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for the task: (returnval){ [ 1227.057492] env[68569]: value = "task-3167925" [ 1227.057492] env[68569]: _type = "Task" [ 1227.057492] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.064565] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b78bb7-b951-dd6c-91cd-998b15b22279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.069847] env[68569]: DEBUG oslo_vmware.api [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.109692] env[68569]: DEBUG nova.compute.utils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1227.116760] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1227.116760] env[68569]: DEBUG nova.network.neutron [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.153366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.153552] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.154436] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84067d4-de94-49e1-88e5-1ad51d32e09d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.182053] env[68569]: DEBUG nova.policy [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73b1c309d1494888945f033a8c5140a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa0ab47201c64b0d87480d4ff90014f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1227.186621] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08187ed9-fe7d-4144-bee4-46e102872885 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.218933] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Reconfiguring VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1227.221904] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c320635b-e2ea-4a07-b063-37e4a5d7027b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.239768] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1227.239768] env[68569]: value = "task-3167926" [ 1227.239768] env[68569]: _type = "Task" [ 1227.239768] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.249692] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.279698] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e89e90b-1db1-4783-a41b-81b56f4e645a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.287101] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3906e1f5-a1ec-4c8a-b67d-5221381e0b65 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.320173] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca90a82-0b07-4a29-8b7e-0437b065b846 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.328867] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afaa1b9b-a35b-4c03-b7f0-8ac7369926a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.342873] env[68569]: DEBUG nova.compute.provider_tree [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.565686] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b78bb7-b951-dd6c-91cd-998b15b22279, 'name': SearchDatastore_Task, 'duration_secs': 0.011833} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.566648] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.566789] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1227.566905] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.567062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.567247] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.567556] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56b09efd-5c9e-4b35-b020-dc3106fbd297 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.572577] env[68569]: DEBUG oslo_vmware.api [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Task: {'id': task-3167925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133349} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.573163] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.573340] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1227.573512] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1227.573678] env[68569]: INFO nova.compute.manager [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1227.573906] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.574122] env[68569]: DEBUG nova.compute.manager [-] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1227.574262] env[68569]: DEBUG nova.network.neutron [-] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1227.579712] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.579887] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1227.580842] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f451ae4-bb29-4498-8219-e09e6d96acf1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.587701] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1227.587701] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef69d7-e333-43cf-39ec-103960f2e59d" [ 1227.587701] env[68569]: _type = "Task" [ 1227.587701] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.596502] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef69d7-e333-43cf-39ec-103960f2e59d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.614500] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1227.739019] env[68569]: DEBUG nova.network.neutron [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Successfully created port: 631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1227.752114] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.846577] env[68569]: DEBUG nova.scheduler.client.report [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1227.852294] env[68569]: DEBUG nova.compute.manager [req-5c815e4e-c89c-48d4-9785-1316d896a181 req-875a5d82-b17a-4454-a821-d783890bb79e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Received event network-vif-deleted-5d59acab-5f9d-44bc-ac8c-231dda0ac182 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1227.852437] env[68569]: INFO nova.compute.manager [req-5c815e4e-c89c-48d4-9785-1316d896a181 req-875a5d82-b17a-4454-a821-d783890bb79e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Neutron deleted interface 5d59acab-5f9d-44bc-ac8c-231dda0ac182; detaching it from the instance and deleting it from the info cache [ 1227.852608] env[68569]: DEBUG nova.network.neutron [req-5c815e4e-c89c-48d4-9785-1316d896a181 req-875a5d82-b17a-4454-a821-d783890bb79e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.099633] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef69d7-e333-43cf-39ec-103960f2e59d, 'name': SearchDatastore_Task, 'duration_secs': 0.00976} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.100643] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a6d68c1-7ae0-4470-b902-aea65bf0164b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.109018] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1228.109018] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef6db3-835b-5794-b558-092a2d64c8ff" [ 1228.109018] env[68569]: _type = "Task" [ 1228.109018] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.117457] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef6db3-835b-5794-b558-092a2d64c8ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.252501] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.326660] env[68569]: DEBUG nova.network.neutron [-] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.360043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.361540] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05b7f940-5181-4b8e-8d2c-250f8b00ad54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.371434] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591d693b-0916-40c2-a8c6-230db8538485 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.383922] env[68569]: INFO nova.scheduler.client.report [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Deleted allocations for instance 5de9a459-a2a2-4d78-9a66-cf819e8893b6 [ 1228.409130] env[68569]: DEBUG nova.compute.manager [req-5c815e4e-c89c-48d4-9785-1316d896a181 req-875a5d82-b17a-4454-a821-d783890bb79e service nova] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Detach interface failed, port_id=5d59acab-5f9d-44bc-ac8c-231dda0ac182, reason: Instance 61aa0997-ffa6-4551-bdaa-132026e240f9 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1228.618110] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef6db3-835b-5794-b558-092a2d64c8ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010792} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.618536] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.618889] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1228.619196] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b97f526c-ad30-4d2a-9bc3-351acac07e46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.622748] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1228.626735] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1228.626735] env[68569]: value = "task-3167927" [ 1228.626735] env[68569]: _type = "Task" [ 1228.626735] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.635026] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167927, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.658436] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.658693] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.658848] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.659102] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.659311] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.659443] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.659700] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.659902] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.660116] env[68569]: DEBUG 
nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.660348] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.660563] env[68569]: DEBUG nova.virt.hardware [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.661573] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269e65f0-2161-4bf9-99ba-a46e5ec3c4db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.672813] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c64724-b3c6-4c3e-99e9-ae62be02cbfa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.754633] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.829484] env[68569]: INFO nova.compute.manager [-] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Took 1.26 seconds to deallocate network for instance. [ 1228.892904] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ea8ad6e9-fb0f-4ec8-897c-767602ff7e84 tempest-AttachVolumeTestJSON-1584858481 tempest-AttachVolumeTestJSON-1584858481-project-member] Lock "5de9a459-a2a2-4d78-9a66-cf819e8893b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.832s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.141320] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167927, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.216540] env[68569]: DEBUG nova.compute.manager [req-52a580a3-3d8c-4c87-9ac7-e0663fbc18c8 req-6dc7035f-bfa2-46b9-9c16-47c44194f6db service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Received event network-vif-plugged-631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1229.216721] env[68569]: DEBUG oslo_concurrency.lockutils [req-52a580a3-3d8c-4c87-9ac7-e0663fbc18c8 req-6dc7035f-bfa2-46b9-9c16-47c44194f6db service nova] Acquiring lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.217076] env[68569]: DEBUG oslo_concurrency.lockutils [req-52a580a3-3d8c-4c87-9ac7-e0663fbc18c8 req-6dc7035f-bfa2-46b9-9c16-47c44194f6db service nova] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.217369] env[68569]: DEBUG oslo_concurrency.lockutils [req-52a580a3-3d8c-4c87-9ac7-e0663fbc18c8 req-6dc7035f-bfa2-46b9-9c16-47c44194f6db service nova] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.217681] env[68569]: DEBUG nova.compute.manager [req-52a580a3-3d8c-4c87-9ac7-e0663fbc18c8 req-6dc7035f-bfa2-46b9-9c16-47c44194f6db service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] No waiting events found dispatching network-vif-plugged-631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1229.217758] env[68569]: WARNING nova.compute.manager [req-52a580a3-3d8c-4c87-9ac7-e0663fbc18c8 req-6dc7035f-bfa2-46b9-9c16-47c44194f6db service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Received unexpected event network-vif-plugged-631ce46a-8b09-43bc-a285-96eaf4e3bc0c for instance with vm_state building and task_state spawning. [ 1229.254109] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.343511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.343511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.002s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.343511] env[68569]: DEBUG nova.objects.instance [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lazy-loading 'resources' on Instance uuid 61aa0997-ffa6-4551-bdaa-132026e240f9 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.349087] env[68569]: DEBUG nova.network.neutron [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Successfully updated port: 631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.639025] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559575} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.639025] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1229.639025] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1229.639931] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8804498d-480b-4dfe-a858-f535fc86abe1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.646938] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1229.646938] env[68569]: value = "task-3167928" [ 1229.646938] env[68569]: _type = "Task" [ 1229.646938] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.654986] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167928, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.753441] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.853399] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.853622] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.853808] env[68569]: DEBUG nova.network.neutron [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1229.950777] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516e2d8c-e0d9-4910-beb2-a10466672ea5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.958601] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d026cc2-2758-413a-820c-8d032c892d32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.990238] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e385687d-ea2f-4b76-8e39-96eed25d9c32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.998631] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cd2a6f-b9f4-4f9c-bcc1-7e40b6d0cf4b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.013704] env[68569]: DEBUG nova.compute.provider_tree [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.156289] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167928, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065371} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.156537] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.157323] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da7a5fa-9146-4007-818c-3b19d84d41ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.176902] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.177213] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0963cc31-eee4-4ece-9398-5ca135f6319a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.196439] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1230.196439] env[68569]: value = "task-3167930" [ 1230.196439] env[68569]: _type = "Task" [ 1230.196439] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.204797] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167930, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.253809] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.384513] env[68569]: DEBUG nova.network.neutron [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1230.516737] env[68569]: DEBUG nova.scheduler.client.report [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.553891] env[68569]: DEBUG nova.network.neutron [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [{"id": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "address": "fa:16:3e:43:35:2a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631ce46a-8b", "ovs_interfaceid": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.707669] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.754237] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.022367] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.046408] env[68569]: INFO nova.scheduler.client.report [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Deleted allocations for instance 61aa0997-ffa6-4551-bdaa-132026e240f9 [ 1231.056736] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.056987] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Instance network_info: |[{"id": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "address": "fa:16:3e:43:35:2a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631ce46a-8b", "ovs_interfaceid": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1231.057732] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:35:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '631ce46a-8b09-43bc-a285-96eaf4e3bc0c', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.066201] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 
tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1231.066428] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1231.066651] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5b74bbf-deb5-4008-9b5c-efcd70179882 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.086558] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.086558] env[68569]: value = "task-3167931" [ 1231.086558] env[68569]: _type = "Task" [ 1231.086558] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.096733] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167931, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.207253] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167930, 'name': ReconfigVM_Task, 'duration_secs': 0.840775} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.207522] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Reconfigured VM instance instance-00000074 to attach disk [datastore2] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.208208] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43b8b28a-b714-46f3-9701-353651d83878 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.213906] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1231.213906] env[68569]: value = "task-3167932" [ 1231.213906] env[68569]: _type = "Task" [ 1231.213906] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.221630] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167932, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.245325] env[68569]: DEBUG nova.compute.manager [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Received event network-changed-631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1231.245325] env[68569]: DEBUG nova.compute.manager [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Refreshing instance network info cache due to event network-changed-631ce46a-8b09-43bc-a285-96eaf4e3bc0c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1231.245325] env[68569]: DEBUG oslo_concurrency.lockutils [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] Acquiring lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.245325] env[68569]: DEBUG oslo_concurrency.lockutils [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] Acquired lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.245325] env[68569]: DEBUG nova.network.neutron [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Refreshing network info cache for port 631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.256469] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.428159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.428380] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.554183] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d2e6b998-75ff-4dc4-ba8a-f4e8d39f8eca tempest-ServerActionsTestOtherA-130731114 tempest-ServerActionsTestOtherA-130731114-project-member] Lock "61aa0997-ffa6-4551-bdaa-132026e240f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.622s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.597026] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167931, 'name': CreateVM_Task, 'duration_secs': 0.357193} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.597026] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1231.597290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.597398] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.597708] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1231.598119] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86c8e0de-4876-4ab7-b1d3-b329c3feb3cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.603221] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 
tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1231.603221] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef4b3f-73e6-06c2-31aa-53750e4d3ce8" [ 1231.603221] env[68569]: _type = "Task" [ 1231.603221] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.611104] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef4b3f-73e6-06c2-31aa-53750e4d3ce8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.724335] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167932, 'name': Rename_Task, 'duration_secs': 0.332668} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.724739] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1231.724840] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94eee2ca-9250-443d-a58f-f33c3ae12bd8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.732234] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1231.732234] env[68569]: value = "task-3167933" [ 1231.732234] env[68569]: _type = "Task" [ 1231.732234] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.739999] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.757648] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.889548] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52748f2a-0663-f2de-94cc-e76c92c24d56/disk-0.vmdk. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1231.890810] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee0111a-4157-473e-845f-f7ee1fbebde7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.897237] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52748f2a-0663-f2de-94cc-e76c92c24d56/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1231.897356] env[68569]: ERROR oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52748f2a-0663-f2de-94cc-e76c92c24d56/disk-0.vmdk due to incomplete transfer. [ 1231.897542] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-008c6796-a286-4bc4-8e1d-1594a698cdca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.906905] env[68569]: DEBUG oslo_vmware.rw_handles [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52748f2a-0663-f2de-94cc-e76c92c24d56/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1231.907146] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Uploaded image 9d214233-2275-4569-9f11-1ab35d8c5299 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1231.909480] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1231.909735] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1a98a1ed-8139-4536-8f72-45462aa44d46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.915148] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1231.915148] env[68569]: value = "task-3167934" [ 1231.915148] env[68569]: _type = "Task" [ 1231.915148] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.923070] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167934, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.931926] env[68569]: INFO nova.compute.manager [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Detaching volume 60cd4f2b-f681-462c-959e-39f6ce745f96 [ 1231.965152] env[68569]: INFO nova.virt.block_device [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Attempting to driver detach volume 60cd4f2b-f681-462c-959e-39f6ce745f96 from mountpoint /dev/sdb [ 1231.965152] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1231.965152] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633727', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'name': 'volume-60cd4f2b-f681-462c-959e-39f6ce745f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6dbe8a18-c317-4b36-bd6f-922ce9f85b6a', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'serial': '60cd4f2b-f681-462c-959e-39f6ce745f96'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1231.965152] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c0c1ac-7020-463f-8284-1c81c908f0dd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.988788] env[68569]: DEBUG nova.network.neutron [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updated VIF entry in instance network info cache for port 631ce46a-8b09-43bc-a285-96eaf4e3bc0c. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1231.989202] env[68569]: DEBUG nova.network.neutron [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [{"id": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "address": "fa:16:3e:43:35:2a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631ce46a-8b", "ovs_interfaceid": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.990919] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc4edc3-c4eb-4b8f-ab65-6bf60869814f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.998055] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade6e5c4-7aec-4a3c-95af-ca6a6a1d62e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.020539] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d120ee-aa13-474b-acaa-bd53ea9b1127 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.036905] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] The volume has not been displaced from its original location: [datastore1] volume-60cd4f2b-f681-462c-959e-39f6ce745f96/volume-60cd4f2b-f681-462c-959e-39f6ce745f96.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1232.042197] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1232.042486] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4630b70-16c6-4d79-b8c6-8e668d276775 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.060454] env[68569]: DEBUG oslo_vmware.api [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1232.060454] env[68569]: value = "task-3167935" [ 1232.060454] env[68569]: _type = "Task" [ 1232.060454] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.069235] env[68569]: DEBUG oslo_vmware.api [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167935, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.114414] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ef4b3f-73e6-06c2-31aa-53750e4d3ce8, 'name': SearchDatastore_Task, 'duration_secs': 0.013504} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.114747] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.114937] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.115194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.115487] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.115593] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.115872] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b261237-e6d8-4092-8dc9-d7642eb613d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.125497] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.125946] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1232.126957] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56ad8e56-6a4b-428f-be72-ba92125279b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.132216] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1232.132216] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526c3502-44b7-155d-aed4-caec7e4d6a81" [ 1232.132216] env[68569]: _type = "Task" [ 1232.132216] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.140132] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526c3502-44b7-155d-aed4-caec7e4d6a81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.241796] env[68569]: DEBUG oslo_vmware.api [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167933, 'name': PowerOnVM_Task, 'duration_secs': 0.507791} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.242082] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.242289] env[68569]: INFO nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Took 5.82 seconds to spawn the instance on the hypervisor. [ 1232.242465] env[68569]: DEBUG nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1232.243741] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076869a9-50b6-47d1-92e0-259cf825bf40 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.260652] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.426875] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167934, 'name': Destroy_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.494931] env[68569]: DEBUG oslo_concurrency.lockutils [req-603e57ef-7872-4164-a771-9d65340cdd1b req-57a48a0a-eec0-4b28-b40e-a6847b56a94a service nova] Releasing lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.573191] env[68569]: DEBUG oslo_vmware.api [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167935, 'name': ReconfigVM_Task, 'duration_secs': 0.374612} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.573191] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1232.576067] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51fb739a-ba2e-44f2-af63-24d37f916268 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.590867] env[68569]: DEBUG oslo_vmware.api [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1232.590867] env[68569]: value = "task-3167936" [ 1232.590867] env[68569]: _type = "Task" [ 1232.590867] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.599739] env[68569]: DEBUG oslo_vmware.api [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.642196] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]526c3502-44b7-155d-aed4-caec7e4d6a81, 'name': SearchDatastore_Task, 'duration_secs': 0.012735} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.642927] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b4677d-98ae-44e1-8793-7c6cd1cc262b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.648498] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1232.648498] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bea3bf-af6a-a97d-541d-b874d1d1eb33" [ 1232.648498] env[68569]: _type = "Task" [ 1232.648498] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.656170] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bea3bf-af6a-a97d-541d-b874d1d1eb33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.761661] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.765389] env[68569]: INFO nova.compute.manager [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Took 14.16 seconds to build instance. [ 1232.927807] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167934, 'name': Destroy_Task, 'duration_secs': 0.610375} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.928070] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroyed the VM [ 1232.928322] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1232.932279] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-66b65008-22a1-4e5f-bfbc-8733fa2d3dec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.935043] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1232.935043] env[68569]: value = "task-3167938" [ 1232.935043] env[68569]: _type = "Task" [ 1232.935043] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.942757] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167938, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.100415] env[68569]: DEBUG oslo_vmware.api [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167936, 'name': ReconfigVM_Task, 'duration_secs': 0.278697} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.100731] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633727', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'name': 'volume-60cd4f2b-f681-462c-959e-39f6ce745f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6dbe8a18-c317-4b36-bd6f-922ce9f85b6a', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd4f2b-f681-462c-959e-39f6ce745f96', 'serial': '60cd4f2b-f681-462c-959e-39f6ce745f96'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1233.159524] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52bea3bf-af6a-a97d-541d-b874d1d1eb33, 'name': SearchDatastore_Task, 'duration_secs': 0.011717} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.161482] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.161482] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e4fc902a-05c1-419c-9019-c22fa0f9ae9d/e4fc902a-05c1-419c-9019-c22fa0f9ae9d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1233.161482] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-819e67d6-e087-44cf-89ef-702b3808febd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.167356] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1233.167356] env[68569]: value = "task-3167939" [ 1233.167356] env[68569]: _type = "Task" [ 1233.167356] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.177446] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.258825] env[68569]: DEBUG oslo_vmware.api [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167926, 'name': ReconfigVM_Task, 'duration_secs': 5.741136} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.260075] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.260075] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Reconfigured VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1233.267633] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4b031da-4ff4-4ccf-a8cb-ddd293824c62 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.674s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.448403] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167938, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.656694] env[68569]: DEBUG nova.objects.instance [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'flavor' on Instance uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.682461] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167939, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.691937] env[68569]: INFO nova.compute.manager [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Rebuilding instance [ 1234.696401] env[68569]: DEBUG oslo_vmware.api [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167938, 'name': RemoveSnapshot_Task, 'duration_secs': 0.95345} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.698633] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1234.698917] env[68569]: INFO nova.compute.manager [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 14.58 seconds to snapshot the instance on the hypervisor. [ 1234.707895] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517502} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.708729] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e4fc902a-05c1-419c-9019-c22fa0f9ae9d/e4fc902a-05c1-419c-9019-c22fa0f9ae9d.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1234.708992] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1234.709208] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68a3f89a-6b75-477d-925f-9b8c7f229f32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.717733] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1234.717733] env[68569]: value = "task-3167940" [ 1234.717733] env[68569]: _type = "Task" [ 1234.717733] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.729520] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167940, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.748294] env[68569]: DEBUG nova.compute.manager [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1234.749500] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bf6f78-4da0-47b6-a3c0-70a738f4b5f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.197125] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.197370] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.197523] env[68569]: DEBUG nova.network.neutron [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1235.202120] env[68569]: DEBUG oslo_concurrency.lockutils [None req-aab13d38-94f9-4f89-95ac-7c275fde9804 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.774s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.227792] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063588} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.228717] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1235.229596] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6732407-fa34-42e1-b073-ba231b41244d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.263266] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] e4fc902a-05c1-419c-9019-c22fa0f9ae9d/e4fc902a-05c1-419c-9019-c22fa0f9ae9d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.267889] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dfeab22-0aa6-491c-816a-2ede02e95958 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.290949] env[68569]: DEBUG nova.compute.manager [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Found 3 images (rotation: 2) {{(pid=68569) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1235.291179] env[68569]: DEBUG nova.compute.manager [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Rotating out 1 backups {{(pid=68569) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1235.291341] env[68569]: DEBUG nova.compute.manager [None req-99ecf713-9b0d-4140-a216-05a0867902bc tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleting image 5f41e5c3-e84e-4f6c-bb68-aa7db4c585c6 {{(pid=68569) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1235.295164] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1235.295164] env[68569]: value = "task-3167942" [ 1235.295164] env[68569]: _type = "Task" [ 1235.295164] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.304735] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167942, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.459527] env[68569]: DEBUG nova.compute.manager [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1235.459718] env[68569]: DEBUG nova.compute.manager [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing instance network info cache due to event network-changed-1ca2f3e3-cee0-4e29-8728-97455622c4be. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1235.459900] env[68569]: DEBUG oslo_concurrency.lockutils [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] Acquiring lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.737965] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.738327] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.738551] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.738761] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.738951] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.741230] env[68569]: INFO 
nova.compute.manager [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Terminating instance [ 1235.785007] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.785363] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dafda00b-1b4b-4ede-a659-c45078b728b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.792582] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1235.792582] env[68569]: value = "task-3167943" [ 1235.792582] env[68569]: _type = "Task" [ 1235.792582] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.805167] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167942, 'name': ReconfigVM_Task, 'duration_secs': 0.315222} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.808765] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Reconfigured VM instance instance-00000075 to attach disk [datastore2] e4fc902a-05c1-419c-9019-c22fa0f9ae9d/e4fc902a-05c1-419c-9019-c22fa0f9ae9d.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1235.809526] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.809811] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bacdb0c4-4670-4cc5-86e4-fee3cb5f0407 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.815136] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1235.815136] env[68569]: value = "task-3167944" [ 1235.815136] env[68569]: _type = "Task" [ 1235.815136] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.825420] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167944, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.004665] env[68569]: INFO nova.network.neutron [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Port 9c043357-2161-4ad7-b828-4cc9886cbf9c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1236.005085] env[68569]: DEBUG nova.network.neutron [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.091652] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-693d59a2-f8f5-4f63-af55-192b0c458ddf-9c043357-2161-4ad7-b828-4cc9886cbf9c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.091909] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-693d59a2-f8f5-4f63-af55-192b0c458ddf-9c043357-2161-4ad7-b828-4cc9886cbf9c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.092296] env[68569]: DEBUG nova.objects.instance [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d 
tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'flavor' on Instance uuid 693d59a2-f8f5-4f63-af55-192b0c458ddf {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.247024] env[68569]: DEBUG nova.compute.manager [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1236.247024] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.247024] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330fb2a5-8f16-4d4e-ad62-0f8d0ee465c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.255018] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.255018] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-436e343b-9cec-41f1-8266-5834b3d8d6ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.260663] env[68569]: DEBUG oslo_vmware.api [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1236.260663] env[68569]: value = "task-3167945" [ 1236.260663] env[68569]: _type = "Task" [ 1236.260663] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.268539] env[68569]: DEBUG oslo_vmware.api [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.302616] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167943, 'name': PowerOffVM_Task, 'duration_secs': 0.205985} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.303222] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.306355] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.306355] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf830c86-e63b-4509-9f1f-68d7b665e1ce {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.314018] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.314018] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e54d0c1f-6e29-4c25-8530-ce888dcb7769 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.325518] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167944, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.340942] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.340942] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.340942] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Deleting the datastore file [datastore2] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.340942] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0650fdea-d40a-432c-ada1-ebe4964fc878 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.346524] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1236.346524] env[68569]: value = "task-3167947" [ 1236.346524] env[68569]: _type = "Task" [ 1236.346524] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.355978] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167947, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.511104] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.513825] env[68569]: DEBUG oslo_concurrency.lockutils [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] Acquired lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.514342] env[68569]: DEBUG nova.network.neutron [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Refreshing network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1236.726370] env[68569]: DEBUG oslo_concurrency.lockutils [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.726637] env[68569]: DEBUG oslo_concurrency.lockutils [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.726816] env[68569]: DEBUG nova.compute.manager [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1236.727729] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4af6c6-b75e-427f-8ddf-b9f0e77747c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.734321] env[68569]: DEBUG nova.compute.manager [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1236.734872] env[68569]: DEBUG nova.objects.instance [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'flavor' on Instance uuid fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.745157] env[68569]: DEBUG nova.objects.instance 
[None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'pci_requests' on Instance uuid 693d59a2-f8f5-4f63-af55-192b0c458ddf {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.770879] env[68569]: DEBUG oslo_vmware.api [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167945, 'name': PowerOffVM_Task, 'duration_secs': 0.18715} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.771292] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.771378] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.772351] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90d20a6e-7f00-4e9b-aa51-2e39e0a08d34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.825678] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167944, 'name': Rename_Task, 'duration_secs': 0.85358} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.825946] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1236.826230] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f16b662-216d-4fed-95a9-41652e19c671 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.832994] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1236.832994] env[68569]: value = "task-3167949" [ 1236.832994] env[68569]: _type = "Task" [ 1236.832994] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.838613] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.838820] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.839079] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleting the datastore file [datastore1] 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.839281] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8fab56f-84fc-4e41-9a6d-5b8838a39680 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.843827] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.850777] env[68569]: DEBUG oslo_vmware.api [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1236.850777] env[68569]: value = "task-3167950" [ 1236.850777] env[68569]: _type = "Task" [ 1236.850777] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.858437] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167947, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099196} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.859128] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.861684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.861684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.866481] env[68569]: DEBUG oslo_vmware.api [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.017868] env[68569]: DEBUG oslo_concurrency.lockutils [None req-ff0c9566-0560-436f-aa30-3db8742c2330 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-6317f756-c9ed-4858-bb2a-c20d9f82f90d-9c043357-2161-4ad7-b828-4cc9886cbf9c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.367s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.247111] env[68569]: DEBUG nova.network.neutron [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updated VIF entry in instance network info cache for port 1ca2f3e3-cee0-4e29-8728-97455622c4be. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1237.247455] env[68569]: DEBUG nova.network.neutron [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [{"id": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "address": "fa:16:3e:6d:8f:9f", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca2f3e3-ce", "ovs_interfaceid": "1ca2f3e3-cee0-4e29-8728-97455622c4be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.248625] env[68569]: DEBUG nova.objects.base [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Object Instance<693d59a2-f8f5-4f63-af55-192b0c458ddf> lazy-loaded attributes: flavor,pci_requests {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1237.248812] env[68569]: DEBUG nova.network.neutron [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1237.320595] env[68569]: DEBUG nova.policy [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5878b8c7304fce9e150e9be38f10c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7613bcf5361d4b08a8d864e59b7fe858', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1237.343542] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167949, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.360903] env[68569]: DEBUG oslo_vmware.api [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.360903] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1237.362616] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1237.362616] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1237.362616] env[68569]: INFO nova.compute.manager [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1237.362616] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1237.362616] env[68569]: DEBUG nova.compute.manager [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1237.362616] env[68569]: DEBUG nova.network.neutron [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1237.741361] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1237.741757] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e042b028-95d6-4649-8ee8-c3180eb620ee {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.748949] env[68569]: DEBUG oslo_vmware.api [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1237.748949] env[68569]: value = "task-3167951" [ 1237.748949] env[68569]: _type = "Task" [ 1237.748949] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.752374] env[68569]: DEBUG oslo_concurrency.lockutils [req-f43c4864-bc5e-492f-8961-10d4239576b6 req-fbdd6640-2e55-4cb8-a66f-ec0f2ae1cc5d service nova] Releasing lock "refresh_cache-6317f756-c9ed-4858-bb2a-c20d9f82f90d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.758426] env[68569]: DEBUG oslo_vmware.api [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.790166] env[68569]: DEBUG nova.compute.manager [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1237.790325] env[68569]: DEBUG nova.compute.manager [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing instance network info cache due to event network-changed-5bab5cde-e125-4604-9b7a-f3e491b5e7c8. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1237.790569] env[68569]: DEBUG oslo_concurrency.lockutils [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.790688] env[68569]: DEBUG oslo_concurrency.lockutils [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.790900] env[68569]: DEBUG nova.network.neutron [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1237.845802] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167949, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.904978] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1237.905280] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.905442] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1237.905638] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.905797] env[68569]: DEBUG nova.virt.hardware [None 
req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1237.905998] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1237.906190] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1237.906377] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1237.906539] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1237.906715] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1237.907050] env[68569]: DEBUG nova.virt.hardware [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1237.908158] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8ba6c3-43fe-46a4-b1d5-7ab51ec42a7c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.917801] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab473f2-6ca3-4964-b863-0760ddfd93a3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.935673] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1237.941637] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1237.943643] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1237.943985] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b0c9528-374f-4dda-9793-df43279cc27f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.963922] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1237.963922] env[68569]: value = "task-3167952" [ 1237.963922] env[68569]: _type = "Task" [ 1237.963922] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.972239] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167952, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.261179] env[68569]: DEBUG oslo_vmware.api [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167951, 'name': PowerOffVM_Task, 'duration_secs': 0.249332} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.261342] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1238.261569] env[68569]: DEBUG nova.compute.manager [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1238.262423] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d179d760-8087-4f61-8396-d442ec7ef6d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.344372] env[68569]: DEBUG oslo_vmware.api [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167949, 'name': PowerOnVM_Task, 'duration_secs': 1.256272} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.344644] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1238.344870] env[68569]: INFO nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Took 9.72 seconds to spawn the instance on the hypervisor. [ 1238.345223] env[68569]: DEBUG nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1238.345997] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da89004d-9c19-438b-a9da-cca3d0ed8175 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.445015] env[68569]: DEBUG nova.network.neutron [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.475986] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167952, 'name': CreateVM_Task, 'duration_secs': 0.334069} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.476223] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1238.476604] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.476762] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.477115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1238.479682] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bf151af-e76f-4734-a8e1-c39cc1ce9208 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.484318] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1238.484318] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c2ef0f-6106-6cea-577f-8aace14687f1" [ 1238.484318] env[68569]: _type = "Task" [ 1238.484318] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.491898] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c2ef0f-6106-6cea-577f-8aace14687f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.575135] env[68569]: DEBUG nova.network.neutron [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updated VIF entry in instance network info cache for port 5bab5cde-e125-4604-9b7a-f3e491b5e7c8. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1238.575311] env[68569]: DEBUG nova.network.neutron [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.776037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-739b7940-3a56-4033-96dd-54d87a37e2b9 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.853842] env[68569]: DEBUG nova.network.neutron [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Successfully updated port: 9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1238.867903] env[68569]: DEBUG nova.compute.manager [req-3c002b09-9da1-4083-a818-df6f633df2c9 req-d42c569b-7ee0-4e34-b3e0-43ec0fd4fdf2 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-vif-plugged-9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1238.869276] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c002b09-9da1-4083-a818-df6f633df2c9 req-d42c569b-7ee0-4e34-b3e0-43ec0fd4fdf2 service nova] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.869563] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c002b09-9da1-4083-a818-df6f633df2c9 req-d42c569b-7ee0-4e34-b3e0-43ec0fd4fdf2 service nova] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.869782] env[68569]: DEBUG oslo_concurrency.lockutils [req-3c002b09-9da1-4083-a818-df6f633df2c9 req-d42c569b-7ee0-4e34-b3e0-43ec0fd4fdf2 service nova] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.870196] env[68569]: DEBUG nova.compute.manager [req-3c002b09-9da1-4083-a818-df6f633df2c9 req-d42c569b-7ee0-4e34-b3e0-43ec0fd4fdf2 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] No waiting events found dispatching network-vif-plugged-9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1238.870308] env[68569]: WARNING nova.compute.manager [req-3c002b09-9da1-4083-a818-df6f633df2c9 req-d42c569b-7ee0-4e34-b3e0-43ec0fd4fdf2 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received unexpected event network-vif-plugged-9c043357-2161-4ad7-b828-4cc9886cbf9c for instance with vm_state active and task_state None. [ 1238.871755] env[68569]: INFO nova.compute.manager [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Took 15.57 seconds to build instance. [ 1238.946875] env[68569]: INFO nova.compute.manager [-] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Took 1.58 seconds to deallocate network for instance. [ 1238.996817] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52c2ef0f-6106-6cea-577f-8aace14687f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009304} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.997176] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.997452] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1238.997725] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.997908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.999044] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1238.999381] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bf22750-ff8a-4c12-9617-2264bb7ddd7d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.009169] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.009468] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Folder [datastore1] devstack-image-cache_base created. 
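The image-cache handling above serializes on a lock named after the cached image and then creates the cache folder only when it is missing. A minimal local-filesystem sketch of that serialize-then-create pattern, assuming oslo.concurrency's lockutils.lock context manager and using ordinary directories as stand-ins for the datastore paths in the log:

import os
from oslo_concurrency import lockutils

def ensure_image_cache_dir(cache_root, image_id):
    """Take a lock named after the cached image, then create the cache
    directory only if it is absent. Local paths stand in for
    '[datastore1] devstack-image-cache_base/...' style datastore paths."""
    lock_name = "%s/%s" % (cache_root, image_id)
    with lockutils.lock(lock_name):         # cf. Acquiring/Releasing lock "[datastore1] ..."
        cache_dir = os.path.join(cache_root, image_id)
        if not os.path.isdir(cache_dir):    # cf. SearchDatastore_Task checking for the file
            os.makedirs(cache_dir)          # cf. FileManager.MakeDirectory
        return cache_dir

Holding the lock across both the existence check and the create is what keeps two concurrent spawns of the same image from racing on the cache directory.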
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1239.010509] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef76ea06-b089-4bd8-9ba2-e29596fbdf35 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.015960] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1239.015960] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c2e99-d4b8-8687-d48d-8831f3e2bec3" [ 1239.015960] env[68569]: _type = "Task" [ 1239.015960] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.025135] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c2e99-d4b8-8687-d48d-8831f3e2bec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.078508] env[68569]: DEBUG oslo_concurrency.lockutils [req-57890ea6-270f-4129-b816-ee40e477ece1 req-5daf8c12-7b7c-4bb9-855c-33009538cebb service nova] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.304253] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.304669] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.366698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.366906] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.367120] env[68569]: DEBUG nova.network.neutron [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 
tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1239.374565] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3de579df-f624-4e8b-ad18-f6a87267c665 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.079s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.453718] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.454030] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.454288] env[68569]: DEBUG nova.objects.instance [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'resources' on Instance uuid 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.527822] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c2e99-d4b8-8687-d48d-8831f3e2bec3, 'name': SearchDatastore_Task, 'duration_secs': 0.008975} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.528981] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b9f51e2-fd74-4098-bd69-f52ca43a6f6e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.534437] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1239.534437] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b2b961-9763-dfde-da58-12778e09b999" [ 1239.534437] env[68569]: _type = "Task" [ 1239.534437] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.542711] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b2b961-9763-dfde-da58-12778e09b999, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.810343] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1239.830918] env[68569]: DEBUG nova.compute.manager [req-75718041-711e-48ff-a999-6def3202da7d req-ef2bbc3f-58ba-4a2e-a036-d66e6fdef583 service nova] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Received event network-vif-deleted-8b05f57b-2ff2-49af-8333-0047f5230208 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1239.903688] env[68569]: WARNING nova.network.neutron [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] 01780a40-9441-415d-988a-24afe70ba382 already exists in list: networks containing: ['01780a40-9441-415d-988a-24afe70ba382']. ignoring it [ 1240.047153] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b2b961-9763-dfde-da58-12778e09b999, 'name': SearchDatastore_Task, 'duration_secs': 0.010423} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.047720] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.047720] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1240.048659] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52471a9b-992a-45be-8275-4f04e3525fc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.054509] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1240.054509] env[68569]: value = "task-3167953" [ 1240.054509] env[68569]: _type = "Task" [ 1240.054509] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.068558] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.097953] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ad5845-ef40-4891-93e0-d43042afbc37 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.108329] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec8e8d5-8306-43fd-8579-cd0b977c50b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.144700] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5432a7d-c0cf-471c-8e21-cdd130e9ac9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.152454] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e0f3b3-d272-4e22-bce0-3addd32cf29b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.167917] env[68569]: DEBUG nova.compute.provider_tree [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.214552] env[68569]: DEBUG nova.network.neutron [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "address": 
"fa:16:3e:63:fe:1b", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c043357-21", "ovs_interfaceid": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.337534] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.541803] env[68569]: DEBUG nova.compute.manager [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Stashing vm_state: stopped {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1240.563794] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167953, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494602} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.564110] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1240.564285] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1240.564591] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97e2cbac-59b3-4bcb-967d-ebbb45ca7547 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.570816] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1240.570816] env[68569]: value = "task-3167954" [ 1240.570816] env[68569]: _type = "Task" [ 1240.570816] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.578759] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167954, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.671076] env[68569]: DEBUG nova.scheduler.client.report [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1240.720307] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.720969] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.721167] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.722049] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b94b8ef-840d-4d07-ada2-621842fd69ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.739368] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1240.739618] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1240.739751] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d 
tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1240.739951] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1240.740110] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1240.740281] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1240.740482] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1240.740655] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1240.740816] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1240.740991] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1240.741178] env[68569]: DEBUG nova.virt.hardware [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1240.747448] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Reconfiguring VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1240.747788] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c684699-ca2e-4056-8515-b67071a2fc57 {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.765581] env[68569]: DEBUG oslo_vmware.api [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1240.765581] env[68569]: value = "task-3167955" [ 1240.765581] env[68569]: _type = "Task" [ 1240.765581] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.773486] env[68569]: DEBUG oslo_vmware.api [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167955, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.896145] env[68569]: DEBUG nova.compute.manager [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-changed-9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1240.896421] env[68569]: DEBUG nova.compute.manager [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing instance network info cache due to event network-changed-9c043357-2161-4ad7-b828-4cc9886cbf9c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1240.896553] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.896733] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.896846] env[68569]: DEBUG nova.network.neutron [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Refreshing network info cache for port 9c043357-2161-4ad7-b828-4cc9886cbf9c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1241.062653] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.080457] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06933} completed successfully. 
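The nova.virt.hardware entries a few lines up enumerate CPU topologies for the 1-vCPU m1.nano flavor under very loose limits and end up with the single option cores=1, sockets=1, threads=1. The brute-force sketch below reproduces that enumeration in a simplified form; it ignores Nova's preference ordering and NUMA handling, and the default limits are just the 65536 values shown in the log.

from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Every (sockets, cores, threads) triple whose product equals the vCPU
    count and that stays under the limits. Simplified illustration only."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- the single topology in the log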
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.080790] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1241.081729] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b1b100-6519-4f07-9f9c-1bdb11e3231f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.101979] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1241.102241] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2dc4a25-cb83-4188-b541-e09f72be3b28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.121542] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1241.121542] env[68569]: value = "task-3167956" [ 1241.121542] env[68569]: _type = "Task" [ 1241.121542] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.130777] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167956, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.175975] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.722s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.178613] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.841s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.180132] env[68569]: INFO nova.compute.claims [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1241.196108] env[68569]: INFO nova.scheduler.client.report [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted allocations for instance 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a [ 1241.276350] env[68569]: DEBUG oslo_vmware.api [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167955, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.631776] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167956, 'name': ReconfigVM_Task, 'duration_secs': 0.290975} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.632042] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Reconfigured VM instance instance-00000074 to attach disk [datastore1] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b/c1b3fe48-aaf4-425a-a97c-e3c9a070db8b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1241.632685] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d973b41-0310-448d-8cf9-f737497110c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.639269] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1241.639269] env[68569]: value = "task-3167957" [ 1241.639269] env[68569]: _type = "Task" [ 1241.639269] env[68569]: } to complete. 
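Throughout this log, lock acquisitions report how long the caller waited and, on release, how long the lock was held (for example "waited 0.841s" and "held 1.722s" just above). A small sketch of that bookkeeping, assuming a plain threading.Lock rather than oslo.concurrency's wrapper:

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name):
    """Report wait and hold times around a named lock, in the spirit of the
    'acquired ... waited Xs' / 'released ... held Ys' lines in this log."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))

# Usage: with timed_lock("compute_resources"): ... claim or free resources ...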
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.651128] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167957, 'name': Rename_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.704050] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c5c73b27-7d31-4b12-8670-f59687bdcd2b tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "6dbe8a18-c317-4b36-bd6f-922ce9f85b6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.966s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.726233] env[68569]: DEBUG nova.network.neutron [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updated VIF entry in instance network info cache for port 9c043357-2161-4ad7-b828-4cc9886cbf9c. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1241.726735] env[68569]: DEBUG nova.network.neutron [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "address": "fa:16:3e:63:fe:1b", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c043357-21", "ovs_interfaceid": "9c043357-2161-4ad7-b828-4cc9886cbf9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.776780] env[68569]: DEBUG oslo_vmware.api [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167955, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.149268] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167957, 'name': Rename_Task, 'duration_secs': 0.138603} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.149623] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1242.149816] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db6d2652-66e5-4f54-908b-f09e0868e40a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.156751] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1242.156751] env[68569]: value = "task-3167958" [ 1242.156751] env[68569]: _type = "Task" [ 1242.156751] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.164469] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167958, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.229345] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.229594] env[68569]: DEBUG nova.compute.manager [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Received event network-changed-631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1242.230340] env[68569]: DEBUG nova.compute.manager [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Refreshing instance network info cache due to event network-changed-631ce46a-8b09-43bc-a285-96eaf4e3bc0c. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1242.230340] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] Acquiring lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.230340] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] Acquired lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.230340] env[68569]: DEBUG nova.network.neutron [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Refreshing network info cache for port 631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1242.278708] env[68569]: DEBUG oslo_vmware.api [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167955, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.294938] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dc78ec-3d4c-4fd8-abbb-43fc7a2236f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.302515] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d972fa-8554-4517-9256-e9dd31d520bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.334333] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1946d4e0-96fe-42b9-b315-89edc49fdafd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.341856] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344fcf9e-5c7b-4b3e-b4e4-917424dbab35 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.355061] env[68569]: DEBUG nova.compute.provider_tree [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.666146] env[68569]: DEBUG oslo_vmware.api [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167958, 'name': PowerOnVM_Task, 'duration_secs': 0.416488} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.666395] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1242.667012] env[68569]: DEBUG nova.compute.manager [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1242.667367] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3423d8f-a482-4551-8fd5-890208aca397 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.779304] env[68569]: DEBUG oslo_vmware.api [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167955, 'name': ReconfigVM_Task, 'duration_secs': 1.590153} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.779808] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.780016] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Reconfigured VM to attach interface {{(pid=68569) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1242.858185] env[68569]: DEBUG nova.scheduler.client.report [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1242.947223] env[68569]: DEBUG nova.network.neutron [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updated VIF entry in instance network info cache for port 631ce46a-8b09-43bc-a285-96eaf4e3bc0c. 
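The inventory payload above is the same one reported earlier for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6. Applying the commonly documented Placement capacity rule, capacity = (total - reserved) * allocation_ratio (the formula is stated from general Placement behaviour, not from this log), those numbers work out as follows; the values are copied from the log entry.

# Effective schedulable capacity per resource class for this provider.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print("%s: %s schedulable units" % (rc, capacity))
# VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0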
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1242.947557] env[68569]: DEBUG nova.network.neutron [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [{"id": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "address": "fa:16:3e:43:35:2a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631ce46a-8b", "ovs_interfaceid": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.184844] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.246037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.246248] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.286587] env[68569]: DEBUG oslo_concurrency.lockutils [None req-54fcf694-3642-4592-bc9f-a6cf5e494d6d tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-693d59a2-f8f5-4f63-af55-192b0c458ddf-9c043357-2161-4ad7-b828-4cc9886cbf9c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.195s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.364825] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.186s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.366312] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.303s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.450544] env[68569]: DEBUG oslo_concurrency.lockutils [req-5fe4738e-cfc1-4a4e-a26b-6b296aba2772 req-a1e0ef44-6022-4c1c-af86-cf22def3568b service nova] Releasing lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.732883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.733049] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.733252] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.733429] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.733593] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.735721] env[68569]: INFO nova.compute.manager [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Terminating instance [ 1243.747840] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1243.868449] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "d5d6f626-5c35-4d69-86cd-8f41cda55145" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.868691] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "d5d6f626-5c35-4d69-86cd-8f41cda55145" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.871883] env[68569]: INFO nova.compute.claims [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1244.240848] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "refresh_cache-c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.241192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquired lock "refresh_cache-c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.241238] env[68569]: DEBUG nova.network.neutron [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1244.268809] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.377017] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "d5d6f626-5c35-4d69-86cd-8f41cda55145" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.377017] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1244.381024] env[68569]: INFO nova.compute.resource_tracker [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating resource usage from migration 33cf9be8-0a37-4c1c-ba92-ea70457625d4 [ 1244.473746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faf186a-35a4-4837-9840-4401906ed475 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.481419] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c317c6-e85b-48ad-8588-2370a20b153f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.511906] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17def802-4d10-4c23-85bf-8e099cf8ccab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.519900] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1e1d9f-336f-4b80-a809-c8fa0ed73c26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.533345] env[68569]: DEBUG nova.compute.provider_tree [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.761422] env[68569]: DEBUG nova.network.neutron [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1244.783574] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "interface-693d59a2-f8f5-4f63-af55-192b0c458ddf-9c043357-2161-4ad7-b828-4cc9886cbf9c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.783799] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-693d59a2-f8f5-4f63-af55-192b0c458ddf-9c043357-2161-4ad7-b828-4cc9886cbf9c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.805518] env[68569]: DEBUG nova.network.neutron [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.885051] env[68569]: DEBUG nova.compute.utils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1244.886463] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1244.886636] env[68569]: DEBUG nova.network.neutron [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1244.923387] env[68569]: DEBUG nova.policy [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e0debeb842d4fea9bb3b575da6eb174', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8884fcdb3d94b23b664f5dcb67d0cc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1245.036349] env[68569]: DEBUG nova.scheduler.client.report [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1245.225500] env[68569]: DEBUG nova.network.neutron [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Successfully created port: bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1245.286981] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.287341] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.288124] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ce9b34-84e8-4ff5-8aa2-e5e86c9f1c9f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.306520] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294ca809-9261-4d6d-891a-405574ececbb {{(pid=68569) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.309503] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Releasing lock "refresh_cache-c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.309892] env[68569]: DEBUG nova.compute.manager [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1245.310099] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1245.311169] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869cf5e2-a63a-45df-99d4-14d814bbc813 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.339335] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Reconfiguring VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1245.341590] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c221edb-481d-4b2e-8eab-6ba871705f6f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.354361] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1245.354633] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a506081-3268-4c3c-b331-0311d894e326 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.363132] env[68569]: DEBUG oslo_vmware.api [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1245.363132] env[68569]: value = "task-3167959" [ 1245.363132] env[68569]: _type = "Task" [ 1245.363132] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.364153] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1245.364153] env[68569]: value = "task-3167960" [ 1245.364153] env[68569]: _type = "Task" [ 1245.364153] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.376080] env[68569]: DEBUG oslo_vmware.api [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167959, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.379291] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.390861] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1245.541831] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.176s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.542026] env[68569]: INFO nova.compute.manager [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Migrating [ 1245.549058] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.364s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.549314] env[68569]: DEBUG nova.objects.instance [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1245.878488] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.881616] env[68569]: DEBUG oslo_vmware.api [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167959, 'name': PowerOffVM_Task, 'duration_secs': 0.207227} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.881866] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.882040] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1245.882292] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da9a6b08-c210-4930-956d-fc6e6e939533 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.910747] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1245.910990] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1245.911234] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Deleting the datastore file [datastore1] c1b3fe48-aaf4-425a-a97c-e3c9a070db8b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1245.911421] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25c0b2ef-2cdf-49e6-b960-a9d5a9a7697f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.917836] env[68569]: DEBUG oslo_vmware.api [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for the task: (returnval){ [ 1245.917836] env[68569]: value = "task-3167962" [ 1245.917836] env[68569]: _type = "Task" [ 1245.917836] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.926734] env[68569]: DEBUG oslo_vmware.api [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.062084] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.062296] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.062471] env[68569]: DEBUG nova.network.neutron [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1246.380212] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.399484] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1246.424268] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1246.424438] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1246.424575] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1246.424759] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1246.424903] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1246.425060] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1246.425275] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1246.425433] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1246.425598] env[68569]: DEBUG nova.virt.hardware [None 
req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1246.425755] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1246.425944] env[68569]: DEBUG nova.virt.hardware [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1246.426756] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc57e9cc-f995-40dc-86aa-dd651c8d5c0c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.431923] env[68569]: DEBUG oslo_vmware.api [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Task: {'id': task-3167962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093226} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.432496] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1246.432684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1246.432858] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1246.433036] env[68569]: INFO nova.compute.manager [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1246.433282] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1246.433470] env[68569]: DEBUG nova.compute.manager [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1246.433571] env[68569]: DEBUG nova.network.neutron [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1246.438078] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e04e241-3732-453a-b274-f566ec78dcfc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.455634] env[68569]: DEBUG nova.network.neutron [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1246.559136] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9f46a495-3f59-441e-993b-0387436c85bf tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.560721] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.292s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.561749] env[68569]: INFO nova.compute.claims [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.773876] env[68569]: DEBUG nova.network.neutron [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating instance_info_cache with network_info: [{"id": "234f9512-640a-4c30-9ae1-166d66a910bc", "address": "fa:16:3e:77:b3:c7", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap234f9512-64", "ovs_interfaceid": "234f9512-640a-4c30-9ae1-166d66a910bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.860629] env[68569]: DEBUG nova.compute.manager [req-0493f5a9-4a02-4eda-a771-c316d59567f5 req-7f57056a-ef5a-4f50-97e6-123d76a808c5 service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Received event network-vif-plugged-bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1246.860890] env[68569]: DEBUG oslo_concurrency.lockutils [req-0493f5a9-4a02-4eda-a771-c316d59567f5 req-7f57056a-ef5a-4f50-97e6-123d76a808c5 service nova] Acquiring lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.861193] env[68569]: DEBUG oslo_concurrency.lockutils [req-0493f5a9-4a02-4eda-a771-c316d59567f5 req-7f57056a-ef5a-4f50-97e6-123d76a808c5 service nova] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.861282] env[68569]: DEBUG oslo_concurrency.lockutils [req-0493f5a9-4a02-4eda-a771-c316d59567f5 req-7f57056a-ef5a-4f50-97e6-123d76a808c5 service nova] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.861448] env[68569]: DEBUG nova.compute.manager [req-0493f5a9-4a02-4eda-a771-c316d59567f5 req-7f57056a-ef5a-4f50-97e6-123d76a808c5 service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] No waiting events found dispatching network-vif-plugged-bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1246.861610] env[68569]: WARNING nova.compute.manager [req-0493f5a9-4a02-4eda-a771-c316d59567f5 req-7f57056a-ef5a-4f50-97e6-123d76a808c5 service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Received unexpected event network-vif-plugged-bf205f6e-4996-4b3f-907a-20d3c8f9368f for instance with vm_state building and task_state spawning. [ 1246.881306] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.956432] env[68569]: DEBUG nova.network.neutron [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Successfully updated port: bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1246.957478] env[68569]: DEBUG nova.network.neutron [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.276641] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-fdcdd4b5-82bd-43c9-8865-807f86789a99" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.381941] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.459687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "refresh_cache-9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.459687] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquired lock "refresh_cache-9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.459687] env[68569]: DEBUG nova.network.neutron [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1247.462352] env[68569]: INFO nova.compute.manager [-] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Took 1.03 seconds to deallocate network for instance. 
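The "Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6" records repeated above and below all carry the same inventory payload. As a minimal illustrative sketch (plain Python, not Nova or Placement code), assuming the standard Placement capacity rule capacity = (total - reserved) * allocation_ratio with max_unit bounding any single allocation, that payload works out as follows:

    # Illustration only: schedulable capacity implied by the inventory dict
    # that appears verbatim in the surrounding records.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 129,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: capacity={capacity}, largest single allocation={inv['max_unit']}")
    # VCPU: capacity=192, largest single allocation=16
    # MEMORY_MB: capacity=196078, largest single allocation=65530
    # DISK_GB: capacity=400, largest single allocation=129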
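The PowerOffVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task records that bracket this point all follow the same oslo.vmware pattern: invoke a vCenter task method, then poll it with wait_for_task, which produces the "Waiting for the task" and "progress is N%" lines seen here. A hedged sketch of that pattern is below; the host, credentials and vm_ref are placeholders, and this is not the driver's actual code.

    # Minimal oslo.vmware task-polling sketch; placeholder values only, not
    # taken from this deployment.
    from oslo_vmware import api

    session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
    vm_ref = ...  # assumed: a VirtualMachine managed object reference
    # Kick off the vCenter task, then block until it finishes; wait_for_task
    # polls every task_poll_interval seconds, logging progress and raising on error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)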
[ 1247.680891] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c9f6eb-635e-4014-bd41-4b20bbd71cc3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.690100] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3786fbbc-51ef-4da7-a930-341272cc543e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.721794] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a155aa07-73b1-4b5e-9b5d-a4467320f16e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.729301] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c408998e-d3dc-407e-8e4f-8f5089d77fc9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.742299] env[68569]: DEBUG nova.compute.provider_tree [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.881605] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.967920] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.009164] env[68569]: DEBUG nova.network.neutron [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1248.136696] env[68569]: DEBUG nova.network.neutron [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Updating instance_info_cache with network_info: [{"id": "bf205f6e-4996-4b3f-907a-20d3c8f9368f", "address": "fa:16:3e:c1:fd:6b", "network": {"id": "c5f852b6-eea6-405a-a13f-32dcb0559921", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1163262440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8884fcdb3d94b23b664f5dcb67d0cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf205f6e-49", "ovs_interfaceid": "bf205f6e-4996-4b3f-907a-20d3c8f9368f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.245569] env[68569]: DEBUG nova.scheduler.client.report [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.382487] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.639450] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Releasing lock "refresh_cache-9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.639793] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Instance network_info: |[{"id": "bf205f6e-4996-4b3f-907a-20d3c8f9368f", "address": "fa:16:3e:c1:fd:6b", "network": {"id": "c5f852b6-eea6-405a-a13f-32dcb0559921", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1163262440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8884fcdb3d94b23b664f5dcb67d0cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf205f6e-49", "ovs_interfaceid": "bf205f6e-4996-4b3f-907a-20d3c8f9368f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1248.640246] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:fd:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf205f6e-4996-4b3f-907a-20d3c8f9368f', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1248.648317] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Creating folder: Project (c8884fcdb3d94b23b664f5dcb67d0cc4). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1248.648601] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0cd4975-42f2-4404-a5ec-452524e5eaa9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.661174] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Created folder: Project (c8884fcdb3d94b23b664f5dcb67d0cc4) in parent group-v633430. [ 1248.661602] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Creating folder: Instances. Parent ref: group-v633748. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1248.661882] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97472177-a864-4d99-8952-8c23c4fe4c5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.673281] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Created folder: Instances in parent group-v633748. [ 1248.673567] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1248.674038] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1248.674038] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-686c8248-2482-420c-a6b2-b0c8169d4a5b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.694047] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1248.694047] env[68569]: value = "task-3167965" [ 1248.694047] env[68569]: _type = "Task" [ 1248.694047] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.701607] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167965, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.751393] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.751978] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1248.755261] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.787s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.755586] env[68569]: DEBUG nova.objects.instance [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lazy-loading 'resources' on Instance uuid c1b3fe48-aaf4-425a-a97c-e3c9a070db8b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1248.884473] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.892694] env[68569]: DEBUG nova.compute.manager [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Received event network-changed-bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1248.892984] env[68569]: DEBUG nova.compute.manager [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Refreshing instance network info cache due to event network-changed-bf205f6e-4996-4b3f-907a-20d3c8f9368f. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1248.893322] env[68569]: DEBUG oslo_concurrency.lockutils [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] Acquiring lock "refresh_cache-9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.893502] env[68569]: DEBUG oslo_concurrency.lockutils [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] Acquired lock "refresh_cache-9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.893773] env[68569]: DEBUG nova.network.neutron [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Refreshing network info cache for port bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.205495] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167965, 'name': CreateVM_Task, 'duration_secs': 0.278389} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.205717] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1249.206444] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.206612] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.206944] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1249.207219] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bfc012c-3fd9-4a84-b52b-0f2ca4a40160 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.212208] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1249.212208] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f51911-3c34-2662-ee00-3c2444fe2356" [ 1249.212208] env[68569]: _type = "Task" [ 1249.212208] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.219676] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f51911-3c34-2662-ee00-3c2444fe2356, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.259426] env[68569]: DEBUG nova.compute.utils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1249.263469] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1249.263646] env[68569]: DEBUG nova.network.neutron [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1249.299300] env[68569]: DEBUG nova.policy [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5038eb62813e43d0b35a7ff07cdd62da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f5444e64380448bac041e3c4fd57865', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1249.365724] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dcecf8-aace-451b-b212-c99d7d89b3c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.377209] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8695683b-e31c-4995-9249-586ce3c8336b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.385485] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.411918] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b519f1b5-8afc-4c06-a2ed-0b771bc430c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.419414] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e55562c-4674-4341-a1cd-dbfe1b474484 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.434848] env[68569]: DEBUG nova.compute.provider_tree [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.613444] env[68569]: DEBUG nova.network.neutron [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Updated VIF entry in instance network info cache for port bf205f6e-4996-4b3f-907a-20d3c8f9368f. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1249.613804] env[68569]: DEBUG nova.network.neutron [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Updating instance_info_cache with network_info: [{"id": "bf205f6e-4996-4b3f-907a-20d3c8f9368f", "address": "fa:16:3e:c1:fd:6b", "network": {"id": "c5f852b6-eea6-405a-a13f-32dcb0559921", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1163262440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c8884fcdb3d94b23b664f5dcb67d0cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf205f6e-49", "ovs_interfaceid": "bf205f6e-4996-4b3f-907a-20d3c8f9368f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.618938] env[68569]: DEBUG nova.network.neutron [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Successfully created port: 224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1249.723607] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': 
session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f51911-3c34-2662-ee00-3c2444fe2356, 'name': SearchDatastore_Task, 'duration_secs': 0.010247} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.723894] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.724132] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1249.724587] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.724742] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.724926] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.725203] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01ba0a41-9dc8-4ced-9903-ea4ddf84dd02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.737136] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.737136] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1249.737136] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afd03ee4-2862-4b44-bb08-894460dde0a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.742916] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1249.742916] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5206ae2d-4ab9-b786-f538-1317f729b54f" [ 1249.742916] env[68569]: _type = "Task" [ 1249.742916] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.751448] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5206ae2d-4ab9-b786-f538-1317f729b54f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.764480] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1249.796418] env[68569]: ERROR nova.compute.manager [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Traceback (most recent call last): [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] yield [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] disk_info = self.driver.migrate_disk_and_power_off( [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1249.796418] env[68569]: ERROR nova.compute.manager [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] [ 1249.883997] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. 
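[editor's note] The ERROR traceback above is a plain Python signature mismatch: the compute manager passes one more positional argument to migrate_disk_and_power_off() than the VMware driver accepts. The sketch below reproduces only the mechanics of that failure with a stand-in class; the parameter names are illustrative, not Nova's actual resize code:

    # Illustrative only: a method accepting 6 to 9 positional arguments
    # (counting self) called with 10 raises exactly the TypeError logged above.
    class FakeDriver:
        # 6 required + 3 optional positional parameters, including self.
        def migrate_disk_and_power_off(self, context, instance, dest, flavor,
                                       network_info, block_device_info=None,
                                       timeout=0, retry_interval=0):
            return "disk_info"

    driver = FakeDriver()
    try:
        # The caller passes one extra positional argument.
        driver.migrate_disk_and_power_off("ctxt", "inst", "dest", "flavor",
                                          "nw_info", None, 0, 0, "extra")
    except TypeError as exc:
        print(exc)  # ... takes from 6 to 9 positional arguments but 10 were given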
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.937847] env[68569]: DEBUG nova.scheduler.client.report [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1250.116605] env[68569]: DEBUG oslo_concurrency.lockutils [req-20c79e24-717a-4a3b-93e8-3d285e508d69 req-dc0a844d-1d87-4133-86a3-a8410b8c5f7b service nova] Releasing lock "refresh_cache-9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.254166] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5206ae2d-4ab9-b786-f538-1317f729b54f, 'name': SearchDatastore_Task, 'duration_secs': 0.008821} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.255076] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0752387-392d-4009-91ba-9c1d159379d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.260547] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1250.260547] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525548dd-0577-0e65-87c2-4446b3300cd3" [ 1250.260547] env[68569]: _type = "Task" [ 1250.260547] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.272112] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525548dd-0577-0e65-87c2-4446b3300cd3, 'name': SearchDatastore_Task} progress is 0%. 
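[editor's note] The inventory report above lists total, reserved and allocation_ratio per resource class. A rough sketch of the usual Placement-style capacity arithmetic, capacity = (total - reserved) * allocation_ratio, applied to the values copied from this log entry (each individual allocation is additionally bounded by the logged max_unit):

    # Capacity arithmetic for the inventory reported above; the formula is the
    # standard Placement interpretation, the numbers come from the log entry.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity ~ {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400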
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.317676] env[68569]: INFO nova.compute.manager [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration 33cf9be8-0a37-4c1c-ba92-ea70457625d4 for instance [ 1250.339393] env[68569]: DEBUG nova.scheduler.client.report [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 184}}, 'project_id': '335f83fe0b8e42aa80e8f0691b609649', 'user_id': '4b8671c22fa042d28350b219ac52d775', 'consumer_generation': 1} on consumer fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1250.384603] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.442129] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.458946] env[68569]: INFO nova.scheduler.client.report [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Deleted allocations for instance c1b3fe48-aaf4-425a-a97c-e3c9a070db8b [ 1250.773665] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1250.775610] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]525548dd-0577-0e65-87c2-4446b3300cd3, 'name': SearchDatastore_Task, 'duration_secs': 0.010635} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.776097] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.776393] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6/9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1250.776843] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c5a9db6-36cf-416a-b4f0-e9d3c1567270 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.785351] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1250.785351] env[68569]: value = "task-3167966" [ 1250.785351] env[68569]: _type = "Task" [ 1250.785351] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.794678] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167966, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.804198] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.804447] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.804604] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.804896] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.805101] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.805261] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.805470] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.805627] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.805793] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.806034] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.806250] env[68569]: DEBUG nova.virt.hardware [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.807145] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14e9a2d-e9ca-448e-8f22-b14335dcde34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.815387] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd300db0-7873-4d3e-8aca-eb21ea6ff23d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.886322] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task} progress is 18%. 
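[editor's note] The hardware.py entries above enumerate possible (sockets, cores, threads) splits for 1 vCPU under 65536:65536:65536 limits and end up choosing 1:1:1. A simplified sketch of that enumeration, illustrative only and not Nova's real _get_possible_cpu_topologies():

    # Simplified enumeration of CPU topologies for a vCPU count under
    # sockets/cores/threads limits; 1 vCPU yields the single topology 1:1:1,
    # matching the "Possible topologies" line above.
    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]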
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.966784] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f200c1cc-7273-4a83-ad60-665c5da4d4f2 tempest-ServersListShow298Test-1566312384 tempest-ServersListShow298Test-1566312384-project-member] Lock "c1b3fe48-aaf4-425a-a97c-e3c9a070db8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.234s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.027705] env[68569]: DEBUG nova.compute.manager [req-7405592e-6600-4a79-9411-de0ea0141c8a req-869955ed-3dd3-442d-9704-7817c73ba902 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-vif-plugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1251.027959] env[68569]: DEBUG oslo_concurrency.lockutils [req-7405592e-6600-4a79-9411-de0ea0141c8a req-869955ed-3dd3-442d-9704-7817c73ba902 service nova] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.028273] env[68569]: DEBUG oslo_concurrency.lockutils [req-7405592e-6600-4a79-9411-de0ea0141c8a req-869955ed-3dd3-442d-9704-7817c73ba902 service nova] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.028478] env[68569]: DEBUG oslo_concurrency.lockutils [req-7405592e-6600-4a79-9411-de0ea0141c8a req-869955ed-3dd3-442d-9704-7817c73ba902 service nova] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.028998] env[68569]: DEBUG nova.compute.manager [req-7405592e-6600-4a79-9411-de0ea0141c8a req-869955ed-3dd3-442d-9704-7817c73ba902 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] No waiting events found dispatching network-vif-plugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1251.029247] env[68569]: WARNING nova.compute.manager [req-7405592e-6600-4a79-9411-de0ea0141c8a req-869955ed-3dd3-442d-9704-7817c73ba902 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received unexpected event network-vif-plugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a for instance with vm_state building and task_state spawning. [ 1251.126945] env[68569]: DEBUG nova.network.neutron [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Successfully updated port: 224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1251.296528] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167966, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478709} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.296789] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6/9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1251.297010] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1251.297257] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b19f070-f9c0-49ac-97c9-9cf34b896cfd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.304142] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1251.304142] env[68569]: value = "task-3167967" [ 1251.304142] env[68569]: _type = "Task" [ 1251.304142] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.313988] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167967, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.386687] env[68569]: DEBUG oslo_vmware.api [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167960, 'name': ReconfigVM_Task, 'duration_secs': 5.861283} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.386939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.387239] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Reconfigured VM to detach interface {{(pid=68569) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1251.473568] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.473842] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.473930] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.474101] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.631345] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.631521] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.631661] env[68569]: DEBUG nova.network.neutron [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.814986] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065037} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.815276] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1251.816068] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbaf879-ef9c-4d6f-bb8e-30d4868b9ca6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.839888] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6/9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.840165] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9ba56b3-5a0e-4175-a31a-9de386b2ed21 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.861098] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1251.861098] env[68569]: value = "task-3167968" [ 1251.861098] env[68569]: _type = "Task" [ 1251.861098] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.869289] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167968, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.904245] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.904562] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.999882] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7ad999-d2e2-49ba-b35c-b14463b8da19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.007861] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86eca5b0-f30d-4f62-8cc9-c61bccfef963 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.039245] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd87f12-ec00-4147-b2de-77a45b6c44fb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.047534] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a410fee-34f8-4df3-aa4e-ae60e69ae085 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.061533] env[68569]: DEBUG nova.compute.provider_tree [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.164252] env[68569]: DEBUG nova.network.neutron [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance cache missing network info. 
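[editor's note] The "Acquiring lock ... / Lock ... acquired / released" lines throughout this log come from oslo.concurrency's lockutils. A minimal sketch of the two usual ways those named locks are taken, assuming in-process (non-external) locks; the function bodies here are placeholders, not the resource tracker's real code:

    # Minimal sketch of the locking that produces the lockutils lines above.
    from oslo_concurrency import lockutils

    def update_usage_sketch(update_fn):
        # Context-manager form: serialize a critical section by lock name.
        with lockutils.lock("compute_resources"):
            update_fn()

    # Decorator form: every call takes the named lock before running.
    @lockutils.synchronized("compute_resources")
    def instance_claim_sketch():
        pass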
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1252.294416] env[68569]: DEBUG nova.network.neutron [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.371761] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167968, 'name': ReconfigVM_Task, 'duration_secs': 0.369239} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.372049] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6/9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1252.372944] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07637514-e0aa-4dc3-a63b-36f3c3e06880 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.379460] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1252.379460] env[68569]: value = "task-3167969" [ 1252.379460] env[68569]: _type = "Task" [ 1252.379460] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.387336] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167969, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.469638] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.469903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.470124] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "fdcdd4b5-82bd-43c9-8865-807f86789a99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.470308] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.470501] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.472642] env[68569]: INFO nova.compute.manager [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Terminating instance [ 1252.566200] env[68569]: DEBUG nova.scheduler.client.report [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1252.597386] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.597576] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.693341] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.693538] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquired lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.693720] env[68569]: DEBUG nova.network.neutron [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.797318] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.797642] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance network_info: |[{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1252.798398] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:a9:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '224a9b72-1fe5-455e-8f12-0ba6f5dd104a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1252.805907] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1252.806183] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1252.806407] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78bfd8a4-e495-4d57-8f97-3182f5f7f93d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.826677] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1252.826677] env[68569]: value = "task-3167970" [ 1252.826677] env[68569]: _type = "Task" [ 1252.826677] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.834266] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167970, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.889509] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167969, 'name': Rename_Task, 'duration_secs': 0.151087} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.889761] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1252.890017] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b80f0a7-9381-49f4-bad0-0687b2bcd27e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.896462] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1252.896462] env[68569]: value = "task-3167971" [ 1252.896462] env[68569]: _type = "Task" [ 1252.896462] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.903994] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.976245] env[68569]: DEBUG nova.compute.manager [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1252.976603] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1252.977574] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48fad14-ccb5-4095-9811-e34ee3fbb98f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.985939] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.986228] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3861b4b6-0ccf-4d82-b2b0-0f8fca100833 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.056721] env[68569]: DEBUG nova.compute.manager [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1253.056779] env[68569]: DEBUG nova.compute.manager [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing instance network info cache due to event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1253.056963] env[68569]: DEBUG oslo_concurrency.lockutils [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.057329] env[68569]: DEBUG oslo_concurrency.lockutils [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.057555] env[68569]: DEBUG nova.network.neutron [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1253.062318] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1253.062440] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.062633] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleting the datastore file [datastore2] fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.063350] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b394b7f1-1574-4e9f-904b-cda6342b587a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.072055] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.167s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.072055] env[68569]: INFO nova.compute.manager [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Successfully reverted task state from resize_migrating on failure for instance. 
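The "Acquiring lock" / "acquired ... waited" / "released ... held" lines above come from oslo_concurrency's lockutils helpers, which wrap each named critical section and record how long the caller waited for the lock and how long it was held. A minimal sketch of that pattern follows; the lock names and function bodies are illustrative only, not Nova's actual code.

# Sketch of the locking pattern behind the lockutils log lines above.
# Lock names and function bodies are hypothetical.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Only one thread at a time may touch the shared tracker state;
    # the wrapper logs the "waited"/"held" durations seen in the log.
    pass

# The per-instance cache refreshes use the same helper as a context manager:
with lockutils.lock('refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649'):
    pass  # rebuild the network info cache for this instance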
[ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server [None req-b8aa1eaf-4b46-4cbd-a528-50e323122c60 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, 
in decorated_function [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server raise self.value [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1253.080183] env[68569]: ERROR oslo_messaging.rpc.server [ 1253.081610] env[68569]: DEBUG oslo_vmware.api [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1253.081610] env[68569]: value = "task-3167973" [ 1253.081610] env[68569]: _type = "Task" [ 1253.081610] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.092121] env[68569]: DEBUG oslo_vmware.api [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167973, 'name': DeleteDatastoreFile_Task} progress is 0%. 
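The traceback ending above records a caller/driver signature mismatch: nova.compute.manager's _resize_instance passes one more positional argument to the driver's migrate_disk_and_power_off() than this build of VMwareVCDriver accepts, and Python counts the bound self, hence "takes from 6 to 9 positional arguments but 10 were given". A minimal, self-contained reproduction of the same failure mode, using hypothetical class and parameter names rather than Nova's actual interfaces:

# Hypothetical reproduction of the TypeError in the traceback above.
# These classes are illustrative, not Nova's real manager or driver.

class OldStyleDriver:
    # 5 required + 3 optional parameters besides self, so Python reports
    # "takes from 6 to 9 positional arguments" (self is counted).
    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor, network_info,
                                   block_device_info=None,
                                   timeout=0, retry_interval=0):
        return {}


class NewStyleManager:
    def __init__(self, driver):
        self.driver = driver

    def _resize_instance(self, context, instance, dest, flavor,
                         network_info, block_device_info,
                         timeout, retry_interval, migration):
        # The manager passes an extra positional argument (migration here)
        # that the driver signature does not yet accept.
        return self.driver.migrate_disk_and_power_off(
            context, instance, dest, flavor, network_info,
            block_device_info, timeout, retry_interval, migration)


if __name__ == "__main__":
    try:
        NewStyleManager(OldStyleDriver())._resize_instance(
            "ctxt", "inst", "dest", "flavor", [], None, 0, 0, "migration")
    except TypeError as exc:
        # OldStyleDriver.migrate_disk_and_power_off() takes from 6 to 9
        # positional arguments but 10 were given
        print(exc)

Whichever side is stale, the remedy is to bring the driver signature and the manager's call back in line; this log alone does not show which side changed.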
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.224051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.224589] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.224750] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.224876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.225468] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.227316] env[68569]: INFO nova.compute.manager [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Terminating instance [ 1253.340171] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167970, 'name': CreateVM_Task, 'duration_secs': 0.338326} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.340333] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1253.341043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.341342] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.341680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1253.341845] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b38e3ce1-55b0-4f30-a377-527641955d63 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.348204] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1253.348204] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523e2c17-1a9c-2475-24a6-2bde61f7920f" [ 1253.348204] env[68569]: _type = "Task" [ 1253.348204] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.356668] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523e2c17-1a9c-2475-24a6-2bde61f7920f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.406693] env[68569]: DEBUG oslo_vmware.api [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167971, 'name': PowerOnVM_Task, 'duration_secs': 0.471177} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.406924] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1253.407145] env[68569]: INFO nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Took 7.01 seconds to spawn the instance on the hypervisor. [ 1253.407324] env[68569]: DEBUG nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1253.408130] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d1418f-1822-4314-94aa-2d255565a02c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.455286] env[68569]: INFO nova.network.neutron [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Port 9c043357-2161-4ad7-b828-4cc9886cbf9c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1253.455775] env[68569]: DEBUG nova.network.neutron [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [{"id": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "address": "fa:16:3e:42:d5:d8", "network": {"id": "01780a40-9441-415d-988a-24afe70ba382", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-61510544-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7613bcf5361d4b08a8d864e59b7fe858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bab5cde-e1", "ovs_interfaceid": "5bab5cde-e125-4604-9b7a-f3e491b5e7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.592067] env[68569]: DEBUG oslo_vmware.api [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 
tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139089} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.592433] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.592490] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.592691] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.592876] env[68569]: INFO nova.compute.manager [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1253.593185] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.593321] env[68569]: DEBUG nova.compute.manager [-] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1253.593415] env[68569]: DEBUG nova.network.neutron [-] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.596915] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.597073] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1253.597536] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.734267] env[68569]: DEBUG nova.compute.manager [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1253.734585] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1253.735398] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef51540-efee-4da2-af7b-66308d3aab2d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.743811] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.744065] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78bf85f2-d529-4585-91cb-dcfbfb4de790 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.746776] env[68569]: DEBUG nova.network.neutron [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updated VIF entry in instance network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1253.747129] env[68569]: DEBUG nova.network.neutron [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.752114] env[68569]: DEBUG oslo_vmware.api [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1253.752114] env[68569]: value = "task-3167974" [ 1253.752114] env[68569]: _type = "Task" [ 1253.752114] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.759544] env[68569]: DEBUG oslo_vmware.api [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167974, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.864121] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]523e2c17-1a9c-2475-24a6-2bde61f7920f, 'name': SearchDatastore_Task, 'duration_secs': 0.014664} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.864382] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.864617] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.864859] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.865016] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.865218] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.865490] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12d5c605-404b-4171-84fe-a0b074fe9f32 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.880135] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.880135] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1253.880135] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c9ad90a-6b40-465d-b54c-5c91a7aff575 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.885652] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1253.885652] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ff40e2-39d6-c456-2c7a-69fada155837" [ 1253.885652] env[68569]: _type = "Task" [ 1253.885652] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.896879] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ff40e2-39d6-c456-2c7a-69fada155837, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.927871] env[68569]: INFO nova.compute.manager [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Took 13.61 seconds to build instance. [ 1253.960680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Releasing lock "refresh_cache-693d59a2-f8f5-4f63-af55-192b0c458ddf" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.102121] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.102121] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.102121] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.102121] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1254.102121] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770fb01c-9366-4603-8079-b4d9fcfc6602 
{{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.112223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfcec6c-da8d-4f9e-8edd-4a93d58a8cf2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.126111] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4ccd2a-af87-4957-a8b7-86072608687f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.133907] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487f0405-72d3-46e7-9710-0e7eeb8c5bcc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.164136] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180177MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1254.164300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.164595] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.249668] env[68569]: DEBUG oslo_concurrency.lockutils [req-cca43cc6-7b45-4497-96ee-335d3088c59f req-df6d52c7-4c2f-431f-b451-e88ffafdb17a service nova] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.262153] env[68569]: DEBUG oslo_vmware.api [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167974, 'name': PowerOffVM_Task, 'duration_secs': 0.184812} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.262416] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.262573] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1254.262807] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45a0a5d2-b255-4cc5-9a3b-edfd7d3a22ab {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.305333] env[68569]: DEBUG nova.compute.manager [req-9cdf9f18-439e-4419-8019-d997ea41d7f3 req-717b1a9d-8679-4291-8ea1-22fa8fbbcf10 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Received event network-vif-deleted-234f9512-640a-4c30-9ae1-166d66a910bc {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1254.305606] env[68569]: INFO nova.compute.manager [req-9cdf9f18-439e-4419-8019-d997ea41d7f3 req-717b1a9d-8679-4291-8ea1-22fa8fbbcf10 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Neutron deleted interface 234f9512-640a-4c30-9ae1-166d66a910bc; detaching it from the instance and deleting it from the info cache [ 1254.306043] env[68569]: DEBUG nova.network.neutron [req-9cdf9f18-439e-4419-8019-d997ea41d7f3 req-717b1a9d-8679-4291-8ea1-22fa8fbbcf10 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.330261] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1254.330553] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1254.330821] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleting the datastore file [datastore2] 693d59a2-f8f5-4f63-af55-192b0c458ddf {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1254.331187] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-015b32aa-0a29-4f7a-8aa8-d924c9f7d517 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.340189] env[68569]: DEBUG 
oslo_vmware.api [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1254.340189] env[68569]: value = "task-3167976" [ 1254.340189] env[68569]: _type = "Task" [ 1254.340189] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.348300] env[68569]: DEBUG oslo_vmware.api [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167976, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.402585] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ff40e2-39d6-c456-2c7a-69fada155837, 'name': SearchDatastore_Task, 'duration_secs': 0.02086} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.403969] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7622b6-8262-47c7-8fb4-acd621809bb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.411162] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1254.411162] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ccf13b-3751-6af8-e9ad-1d6c0299e2a6" [ 1254.411162] env[68569]: _type = "Task" [ 1254.411162] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.421563] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ccf13b-3751-6af8-e9ad-1d6c0299e2a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.430460] env[68569]: DEBUG oslo_concurrency.lockutils [None req-69b69292-2554-46bc-9929-6fe249057a97 tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.125s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.465539] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f203c355-c92f-4da6-b659-9b34bbe071df tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "interface-693d59a2-f8f5-4f63-af55-192b0c458ddf-9c043357-2161-4ad7-b828-4cc9886cbf9c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.682s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.782239] env[68569]: DEBUG nova.network.neutron [-] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.809030] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47d021f9-ed96-49bd-b755-48d31aab6d1e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.819898] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659d3ac5-7e8b-4b55-8a74-3d0bf2b6147a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.851363] env[68569]: DEBUG nova.compute.manager [req-9cdf9f18-439e-4419-8019-d997ea41d7f3 req-717b1a9d-8679-4291-8ea1-22fa8fbbcf10 service nova] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Detach interface failed, port_id=234f9512-640a-4c30-9ae1-166d66a910bc, reason: Instance fdcdd4b5-82bd-43c9-8865-807f86789a99 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1254.857123] env[68569]: DEBUG oslo_vmware.api [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184492} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.857390] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.857569] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.857742] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.857911] env[68569]: INFO nova.compute.manager [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1254.858241] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1254.858441] env[68569]: DEBUG nova.compute.manager [-] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1254.858532] env[68569]: DEBUG nova.network.neutron [-] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1254.896385] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.896657] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.896875] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.897849] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.899064] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.903640] env[68569]: INFO nova.compute.manager [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Terminating instance [ 1254.926041] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ccf13b-3751-6af8-e9ad-1d6c0299e2a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009749} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.926041] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.926041] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1254.926358] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c65d4249-be99-4c06-9a44-010ece84c1ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.934363] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1254.934363] env[68569]: value = "task-3167977" [ 1254.934363] env[68569]: _type = "Task" [ 1254.934363] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.943354] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167977, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.172855] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Applying migration context for instance fdcdd4b5-82bd-43c9-8865-807f86789a99 as it has an incoming, in-progress migration 33cf9be8-0a37-4c1c-ba92-ea70457625d4. Migration status is error {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1255.173953] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=68569) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1255.199368] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 6317f756-c9ed-4858-bb2a-c20d9f82f90d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.199637] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 693d59a2-f8f5-4f63-af55-192b0c458ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.199637] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance e4fc902a-05c1-419c-9019-c22fa0f9ae9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.199780] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.199888] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fdcdd4b5-82bd-43c9-8865-807f86789a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.200013] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance d57645fc-82d5-4ae8-93c3-0de095a66649 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.200207] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1255.200436] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1255.284598] env[68569]: INFO nova.compute.manager [-] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Took 1.69 seconds to deallocate network for instance. 
[ 1255.311195] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3105571-82a9-44fe-8698-d9673bf17eac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.327326] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db30431b-c8a1-4037-8fbd-944675feda03 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.387702] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8763a222-801d-4de2-a39d-d980e690d495 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.401711] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081548f7-f3b6-48c7-82b2-48da6734f490 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.408775] env[68569]: DEBUG nova.compute.manager [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1255.409632] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1255.410935] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e02bf50-72b1-428d-9ff2-555347fc78d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.427069] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.430984] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1255.431436] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd585cc0-dfe7-4da0-b578-a81a43695feb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.440155] env[68569]: DEBUG oslo_vmware.api [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1255.440155] env[68569]: value = "task-3167978" [ 1255.440155] env[68569]: _type = "Task" [ 1255.440155] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.446822] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167977, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.452902] env[68569]: DEBUG oslo_vmware.api [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167978, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.799391] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.933139] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1255.947129] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167977, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526137} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.948241] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1255.948460] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1255.948684] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ea18f94-8967-4680-817b-c6ecf32801d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.953517] env[68569]: DEBUG oslo_vmware.api [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167978, 'name': PowerOffVM_Task, 'duration_secs': 0.19692} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.954034] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1255.954206] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1255.954419] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88692d3b-8e0f-4d6c-8b7e-c3f536c5265d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.958664] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1255.958664] env[68569]: value = "task-3167979" [ 1255.958664] env[68569]: _type = "Task" [ 1255.958664] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.967899] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167979, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.037207] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1256.037586] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1256.037843] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Deleting the datastore file [datastore1] 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1256.038264] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d01974e5-c5b5-4a63-8bae-9831e199464d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.041990] env[68569]: DEBUG nova.network.neutron [-] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.048322] env[68569]: DEBUG oslo_vmware.api [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for the task: (returnval){ [ 1256.048322] env[68569]: value = "task-3167981" [ 1256.048322] env[68569]: _type = "Task" [ 1256.048322] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.057329] env[68569]: DEBUG oslo_vmware.api [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167981, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.332786] env[68569]: DEBUG nova.compute.manager [req-ea9513bb-31e1-4253-bc48-68bd76b85701 req-5435209f-b037-45fe-9782-a57e4cf70401 service nova] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Received event network-vif-deleted-5bab5cde-e125-4604-9b7a-f3e491b5e7c8 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1256.440527] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1256.440777] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.276s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.441057] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.642s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.441306] env[68569]: DEBUG nova.objects.instance [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'resources' on Instance uuid fdcdd4b5-82bd-43c9-8865-807f86789a99 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.468811] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095129} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.469085] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1256.469799] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b29b0d-4414-429d-834a-c74cc7268b0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.491344] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.491580] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfab7eac-ef85-4b2e-8c79-2f62c44eb5e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.521451] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1256.521451] env[68569]: value = "task-3167982" [ 1256.521451] env[68569]: _type = "Task" [ 1256.521451] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.529740] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.545507] env[68569]: INFO nova.compute.manager [-] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Took 1.69 seconds to deallocate network for instance. [ 1256.559026] env[68569]: DEBUG oslo_vmware.api [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Task: {'id': task-3167981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201745} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.559331] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1256.559530] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1256.559711] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1256.559871] env[68569]: INFO nova.compute.manager [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1256.560120] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1256.560308] env[68569]: DEBUG nova.compute.manager [-] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1256.560403] env[68569]: DEBUG nova.network.neutron [-] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1257.027069] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244672d1-e803-4aa8-8e41-57d23e2f2240 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.036738] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167982, 'name': ReconfigVM_Task, 'duration_secs': 0.329765} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.037089] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Reconfigured VM instance instance-00000077 to attach disk [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.038428] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bf941d-7c3a-4c72-a5c0-641cf0d81914 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.041292] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f510a33-4a60-49a3-b88e-0c0bc7cb2aeb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.072775] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1257.072775] env[68569]: value = "task-3167983" [ 1257.072775] env[68569]: _type = "Task" [ 1257.072775] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.073785] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.075237] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1592067a-cc67-42d9-91ba-7fa40591fab4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.085964] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167983, 'name': Rename_Task} progress is 14%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.088951] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ad8d48-3b0f-47bc-a2d8-7fbf9405f173 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.102573] env[68569]: DEBUG nova.compute.provider_tree [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.295358] env[68569]: DEBUG nova.network.neutron [-] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.584420] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167983, 'name': Rename_Task, 'duration_secs': 0.149596} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.584609] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.584831] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec662646-12bd-4afb-b8c7-d81be4f47307 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.591371] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1257.591371] env[68569]: value = "task-3167984" [ 1257.591371] env[68569]: _type = "Task" [ 1257.591371] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.598400] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167984, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.605371] env[68569]: DEBUG nova.scheduler.client.report [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1257.797986] env[68569]: INFO nova.compute.manager [-] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Took 1.24 seconds to deallocate network for instance. [ 1258.106026] env[68569]: DEBUG oslo_vmware.api [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3167984, 'name': PowerOnVM_Task, 'duration_secs': 0.446232} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.106471] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.106783] env[68569]: INFO nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Took 7.33 seconds to spawn the instance on the hypervisor. 
[ 1258.107076] env[68569]: DEBUG nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.108224] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcbc801-2c69-4195-8b09-fba604efda35 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.112369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.115596] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.041s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.115596] env[68569]: DEBUG nova.objects.instance [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'resources' on Instance uuid 693d59a2-f8f5-4f63-af55-192b0c458ddf {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.137458] env[68569]: INFO nova.scheduler.client.report [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted allocations for instance fdcdd4b5-82bd-43c9-8865-807f86789a99 [ 1258.304470] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.358494] env[68569]: DEBUG nova.compute.manager [req-f4dc3478-0fdb-4029-a2b6-b63bd88a19c1 req-17c98489-bf4f-467b-9056-663b57e171fd service nova] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Received event network-vif-deleted-bf205f6e-4996-4b3f-907a-20d3c8f9368f {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1258.632852] env[68569]: INFO nova.compute.manager [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Took 14.38 seconds to build instance. 
[ 1258.648563] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a14317cd-5751-43d1-b4e0-a021f7710b0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "fdcdd4b5-82bd-43c9-8865-807f86789a99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.179s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.690269] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb40b97-0fc3-49a4-be72-a66a7030e5c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.697846] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95712bf-6b46-4e35-ac4e-9f7801aa668f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.727773] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a9e19b-6e9f-4c4c-8c98-b06b5c817c19 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.734969] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059b908c-88a4-4cb4-9cf4-ec000b5da1bf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.748564] env[68569]: DEBUG nova.compute.provider_tree [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.137315] env[68569]: DEBUG oslo_concurrency.lockutils [None req-95d72641-eafe-4f93-8606-1a443b1b0314 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.891s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.252544] env[68569]: DEBUG nova.scheduler.client.report [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.757167] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.761037] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.456s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.761037] env[68569]: DEBUG nova.objects.instance [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lazy-loading 'resources' on Instance uuid 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.778071] env[68569]: INFO nova.scheduler.client.report [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted allocations for instance 693d59a2-f8f5-4f63-af55-192b0c458ddf [ 1260.108989] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "1055b531-4bca-4245-ab61-698c21b9e484" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.109251] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.286147] env[68569]: DEBUG oslo_concurrency.lockutils [None req-286ede2d-9fa8-42ef-b404-a9849575aba1 tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "693d59a2-f8f5-4f63-af55-192b0c458ddf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.062s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.334621] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4ea711-becf-4042-bbcb-0e4ed20d7c7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.342635] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ee2b6f-46d9-4875-a40c-11dd7dc4a502 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.374303] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52bb30f-90bd-4018-91bc-33951843cd30 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.384672] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9642e2b9-3326-4c83-a409-09ac67345533 {{(pid=68569) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.389738] env[68569]: DEBUG nova.compute.manager [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1260.389927] env[68569]: DEBUG nova.compute.manager [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing instance network info cache due to event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1260.390158] env[68569]: DEBUG oslo_concurrency.lockutils [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.390300] env[68569]: DEBUG oslo_concurrency.lockutils [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.390458] env[68569]: DEBUG nova.network.neutron [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1260.406938] env[68569]: DEBUG nova.compute.provider_tree [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.612129] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1260.910372] env[68569]: DEBUG nova.scheduler.client.report [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1260.938425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.939219] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.939430] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.939616] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.939786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.941841] env[68569]: INFO nova.compute.manager [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Terminating instance [ 1261.088351] env[68569]: DEBUG nova.network.neutron [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 
req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updated VIF entry in instance network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1261.088785] env[68569]: DEBUG nova.network.neutron [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.131154] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.416558] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.656s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.419489] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.288s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.421034] env[68569]: INFO nova.compute.claims [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1261.434219] env[68569]: INFO nova.scheduler.client.report [None 
req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Deleted allocations for instance 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6 [ 1261.437602] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.445346] env[68569]: DEBUG nova.compute.manager [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1261.445555] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1261.446450] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9e1559-2b55-447e-82a3-19d20cc81e80 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.455414] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1261.455648] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-913fb9ee-148b-4299-b33d-d6b7e7716aaf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.462637] env[68569]: DEBUG oslo_vmware.api [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1261.462637] env[68569]: value = "task-3167985" [ 1261.462637] env[68569]: _type = "Task" [ 1261.462637] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.471849] env[68569]: DEBUG oslo_vmware.api [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.591511] env[68569]: DEBUG oslo_concurrency.lockutils [req-51ff9167-57ea-4ca9-bb1c-a135979b1ee0 req-3efb8212-78b0-439a-b5a5-bae3f4cc46f1 service nova] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.943425] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8d94e417-6a61-4795-b0ef-99ce4ac2600f tempest-ServerGroupTestJSON-1006457124 tempest-ServerGroupTestJSON-1006457124-project-member] Lock "9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.047s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.972314] env[68569]: DEBUG oslo_vmware.api [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167985, 'name': PowerOffVM_Task, 'duration_secs': 0.37301} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.972624] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.972795] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1261.973091] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-591dd3d2-a98d-457f-8be9-ab172daa4ec4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.043055] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.043317] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.043503] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleting the datastore file [datastore1] 6317f756-c9ed-4858-bb2a-c20d9f82f90d {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.043784] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-6657e7f4-09a7-4ea2-9e77-a05bfff71de8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.051843] env[68569]: DEBUG oslo_vmware.api [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for the task: (returnval){ [ 1262.051843] env[68569]: value = "task-3167987" [ 1262.051843] env[68569]: _type = "Task" [ 1262.051843] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.061829] env[68569]: DEBUG oslo_vmware.api [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.504454] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14be1a20-ff20-4052-9e08-bc7ad2d14b96 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.515482] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5777a231-2621-45b9-868a-9619fa3cf1c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.574980] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fd1bcd-30e6-4b41-b5b3-e03e4778444c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.587333] env[68569]: DEBUG oslo_vmware.api [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Task: {'id': task-3167987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378256} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.590669] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1262.591915] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1262.591915] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1262.591915] env[68569]: INFO nova.compute.manager [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1262.591915] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1262.592936] env[68569]: DEBUG nova.compute.manager [-] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1262.592936] env[68569]: DEBUG nova.network.neutron [-] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1262.595244] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f49b5c9-d174-4ac9-ac0d-4ac7f9a3f8b7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.611212] env[68569]: DEBUG nova.compute.provider_tree [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.872181] env[68569]: DEBUG nova.compute.manager [req-86ae2d42-f118-4b0a-9c36-cbfcdf904017 req-6d87177a-c589-4252-8518-cc663aa0f736 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Received event network-vif-deleted-1ca2f3e3-cee0-4e29-8728-97455622c4be {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1262.872181] env[68569]: INFO nova.compute.manager [req-86ae2d42-f118-4b0a-9c36-cbfcdf904017 req-6d87177a-c589-4252-8518-cc663aa0f736 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Neutron deleted interface 1ca2f3e3-cee0-4e29-8728-97455622c4be; detaching it from the instance and deleting it from the info cache [ 1262.872181] env[68569]: DEBUG nova.network.neutron [req-86ae2d42-f118-4b0a-9c36-cbfcdf904017 req-6d87177a-c589-4252-8518-cc663aa0f736 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.113690] env[68569]: DEBUG nova.scheduler.client.report [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1263.350986] env[68569]: DEBUG nova.network.neutron [-] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.374479] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd951ab8-f8ad-4229-af15-9fb9fbe24e99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.388032] env[68569]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3685c2db-673b-40bb-9e66-4d0400798f92 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.415809] env[68569]: DEBUG nova.compute.manager [req-86ae2d42-f118-4b0a-9c36-cbfcdf904017 req-6d87177a-c589-4252-8518-cc663aa0f736 service nova] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Detach interface failed, port_id=1ca2f3e3-cee0-4e29-8728-97455622c4be, reason: Instance 6317f756-c9ed-4858-bb2a-c20d9f82f90d could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1263.618570] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.618962] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1263.855465] env[68569]: INFO nova.compute.manager [-] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Took 1.26 seconds to deallocate network for instance. [ 1264.125063] env[68569]: DEBUG nova.compute.utils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1264.125616] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1264.125820] env[68569]: DEBUG nova.network.neutron [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1264.169643] env[68569]: DEBUG nova.policy [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b8671c22fa042d28350b219ac52d775', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '335f83fe0b8e42aa80e8f0691b609649', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1264.362814] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.363142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.363365] env[68569]: DEBUG nova.objects.instance [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lazy-loading 'resources' on Instance uuid 6317f756-c9ed-4858-bb2a-c20d9f82f90d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1264.431868] env[68569]: DEBUG nova.network.neutron [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Successfully created port: 6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1264.628913] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1264.931293] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e487254-e9ab-4fe1-94d3-de23fa029480 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.939513] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad2c334-8191-47c5-87cf-e84734749e57 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.970583] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3743591b-df43-4d63-a98c-611a1f96ec4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.978519] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a88bef9-5e97-49c1-946a-e229eb873b64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.991872] env[68569]: DEBUG nova.compute.provider_tree [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.494912] env[68569]: DEBUG nova.scheduler.client.report [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1265.639486] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1265.667467] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1265.667719] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1265.667875] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1265.668062] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1265.668240] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1265.668394] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1265.668601] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1265.668758] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1265.668920] env[68569]: DEBUG 
nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1265.669091] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1265.669359] env[68569]: DEBUG nova.virt.hardware [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1265.670264] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4448b8d-0439-4157-8d80-52779629b8aa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.679703] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9c2e35-c82b-438e-85a2-7c57eed334a4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.775084] env[68569]: DEBUG nova.compute.manager [req-323462b7-37a8-4f2f-ac31-4a1040e18b60 req-30cb6331-9e09-4b62-b9ae-f2b72dedddda service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Received event network-vif-plugged-6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1265.775350] env[68569]: DEBUG oslo_concurrency.lockutils [req-323462b7-37a8-4f2f-ac31-4a1040e18b60 req-30cb6331-9e09-4b62-b9ae-f2b72dedddda service nova] Acquiring lock "1055b531-4bca-4245-ab61-698c21b9e484-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.775575] env[68569]: DEBUG oslo_concurrency.lockutils [req-323462b7-37a8-4f2f-ac31-4a1040e18b60 req-30cb6331-9e09-4b62-b9ae-f2b72dedddda service nova] Lock "1055b531-4bca-4245-ab61-698c21b9e484-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.775736] env[68569]: DEBUG oslo_concurrency.lockutils [req-323462b7-37a8-4f2f-ac31-4a1040e18b60 req-30cb6331-9e09-4b62-b9ae-f2b72dedddda service nova] Lock "1055b531-4bca-4245-ab61-698c21b9e484-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.775898] env[68569]: DEBUG nova.compute.manager [req-323462b7-37a8-4f2f-ac31-4a1040e18b60 req-30cb6331-9e09-4b62-b9ae-f2b72dedddda service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] No waiting events found dispatching network-vif-plugged-6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1265.776319] env[68569]: WARNING nova.compute.manager 
[req-323462b7-37a8-4f2f-ac31-4a1040e18b60 req-30cb6331-9e09-4b62-b9ae-f2b72dedddda service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Received unexpected event network-vif-plugged-6d8ce8ac-d9c6-406f-9778-83b5837c1d87 for instance with vm_state building and task_state spawning. [ 1265.857764] env[68569]: DEBUG nova.network.neutron [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Successfully updated port: 6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1266.000491] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.637s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.019009] env[68569]: INFO nova.scheduler.client.report [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Deleted allocations for instance 6317f756-c9ed-4858-bb2a-c20d9f82f90d [ 1266.362894] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.363061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1266.363224] env[68569]: DEBUG nova.network.neutron [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1266.526388] env[68569]: DEBUG oslo_concurrency.lockutils [None req-346ee4e6-5f8b-4e7f-8ce7-551afb99b6ce tempest-AttachInterfacesTestJSON-964810485 tempest-AttachInterfacesTestJSON-964810485-project-member] Lock "6317f756-c9ed-4858-bb2a-c20d9f82f90d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.587s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.896521] env[68569]: DEBUG nova.network.neutron [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1267.022691] env[68569]: DEBUG nova.network.neutron [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating instance_info_cache with network_info: [{"id": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "address": "fa:16:3e:d4:c4:21", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8ce8ac-d9", "ovs_interfaceid": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.525903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1267.526331] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Instance network_info: |[{"id": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "address": "fa:16:3e:d4:c4:21", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8ce8ac-d9", "ovs_interfaceid": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1267.526825] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:c4:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d8ce8ac-d9c6-406f-9778-83b5837c1d87', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1267.535115] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1267.535282] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1267.535506] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d447b438-01bf-4bfb-83f1-dbef6ad03b11 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.559795] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1267.559795] env[68569]: value = "task-3167988" [ 1267.559795] env[68569]: _type = "Task" [ 1267.559795] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.573251] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167988, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.807496] env[68569]: DEBUG nova.compute.manager [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Received event network-changed-6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1267.807686] env[68569]: DEBUG nova.compute.manager [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Refreshing instance network info cache due to event network-changed-6d8ce8ac-d9c6-406f-9778-83b5837c1d87. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1267.807903] env[68569]: DEBUG oslo_concurrency.lockutils [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] Acquiring lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.808092] env[68569]: DEBUG oslo_concurrency.lockutils [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] Acquired lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.808393] env[68569]: DEBUG nova.network.neutron [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Refreshing network info cache for port 6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1268.070521] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167988, 'name': CreateVM_Task, 'duration_secs': 0.320899} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.070805] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1268.071390] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.071557] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.071871] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1268.072143] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-656eaf59-ffef-4318-8f99-a091dd490199 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.076937] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1268.076937] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e4d39a-67db-5952-743a-30b433c3aaed" [ 1268.076937] env[68569]: _type = "Task" [ 1268.076937] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.085254] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e4d39a-67db-5952-743a-30b433c3aaed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.501405] env[68569]: DEBUG nova.network.neutron [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updated VIF entry in instance network info cache for port 6d8ce8ac-d9c6-406f-9778-83b5837c1d87. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1268.501405] env[68569]: DEBUG nova.network.neutron [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating instance_info_cache with network_info: [{"id": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "address": "fa:16:3e:d4:c4:21", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8ce8ac-d9", "ovs_interfaceid": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.588232] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e4d39a-67db-5952-743a-30b433c3aaed, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.588232] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.588724] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1268.589626] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.589626] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.589626] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1268.589626] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f749932-8500-47ec-b07f-f0f82423c3ca {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.601236] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1268.601422] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1268.602172] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5db78111-d587-4760-9c71-596d033f4e90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.607812] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1268.607812] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0d47e-ec40-57f1-b494-09c9de92e91f" [ 1268.607812] env[68569]: _type = "Task" [ 1268.607812] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.617392] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0d47e-ec40-57f1-b494-09c9de92e91f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.003747] env[68569]: DEBUG oslo_concurrency.lockutils [req-547dc0c5-8869-470b-9761-27fee2ae6e67 req-64299719-cebc-4240-8c8c-dc2f25c99c65 service nova] Releasing lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.120711] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0d47e-ec40-57f1-b494-09c9de92e91f, 'name': SearchDatastore_Task, 'duration_secs': 0.009891} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.121599] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b48f41ed-50ac-4530-b87e-99e448e0db99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.128319] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1269.128319] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52be1d5c-bce4-ddc0-7a1a-f91b034070d2" [ 1269.128319] env[68569]: _type = "Task" [ 1269.128319] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.137054] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52be1d5c-bce4-ddc0-7a1a-f91b034070d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.640751] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52be1d5c-bce4-ddc0-7a1a-f91b034070d2, 'name': SearchDatastore_Task, 'duration_secs': 0.016208} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.641049] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.641304] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 1055b531-4bca-4245-ab61-698c21b9e484/1055b531-4bca-4245-ab61-698c21b9e484.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1269.641562] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82e55924-ced5-4478-9a0e-0c100c1152d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.648537] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1269.648537] env[68569]: value = "task-3167989" [ 1269.648537] env[68569]: _type = "Task" [ 1269.648537] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.657113] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167989, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.158638] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167989, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.432499} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.159046] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 1055b531-4bca-4245-ab61-698c21b9e484/1055b531-4bca-4245-ab61-698c21b9e484.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1270.159146] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1270.159367] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fa31333-30db-4006-9354-01cfbfb3bb3f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.167680] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1270.167680] env[68569]: value = "task-3167990" [ 1270.167680] env[68569]: _type = "Task" [ 1270.167680] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.175713] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167990, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.317761] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "87acc843-9940-4887-81ff-3fba98343340" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.317976] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "87acc843-9940-4887-81ff-3fba98343340" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.678654] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.256403} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.679360] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1270.679930] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a714427-7013-4a08-8825-3b429fa1d950 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.701316] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 1055b531-4bca-4245-ab61-698c21b9e484/1055b531-4bca-4245-ab61-698c21b9e484.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1270.701548] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cbcc021-fd6b-44a6-b3d5-246302783970 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.720847] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1270.720847] env[68569]: value = "task-3167991" [ 1270.720847] env[68569]: _type = "Task" [ 1270.720847] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.728315] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167991, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.820604] env[68569]: DEBUG nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1271.230947] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167991, 'name': ReconfigVM_Task, 'duration_secs': 0.282412} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.231311] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 1055b531-4bca-4245-ab61-698c21b9e484/1055b531-4bca-4245-ab61-698c21b9e484.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.231901] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-233f1a79-5757-4cbb-b9f5-9b388b5e28d5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.238020] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1271.238020] env[68569]: value = "task-3167992" [ 1271.238020] env[68569]: _type = "Task" [ 1271.238020] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.245668] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167992, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.344061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.344353] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.346154] env[68569]: INFO nova.compute.claims [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1271.750570] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167992, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.249557] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167992, 'name': Rename_Task} progress is 99%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.415191] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ea3add-4967-4054-b9b8-9120eabb8992 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.423279] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ca0ad9-f5e7-4a06-9cca-1a581346a452 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.453619] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066f0a99-1008-4a28-a991-b0b4ad2a68cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.461228] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd256b8f-3726-4ab4-a5e6-9f63db1d9ade {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.473994] env[68569]: DEBUG nova.compute.provider_tree [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.750843] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167992, 'name': Rename_Task, 'duration_secs': 1.156696} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.751147] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.751403] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7742ac95-d7b9-45f3-9fe1-7aae64068d1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.759009] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1272.759009] env[68569]: value = "task-3167993" [ 1272.759009] env[68569]: _type = "Task" [ 1272.759009] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.766913] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167993, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.977625] env[68569]: DEBUG nova.scheduler.client.report [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1273.270998] env[68569]: DEBUG oslo_vmware.api [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3167993, 'name': PowerOnVM_Task, 'duration_secs': 0.454142} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.271432] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.271432] env[68569]: INFO nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Took 7.63 seconds to spawn the instance on the hypervisor. [ 1273.271575] env[68569]: DEBUG nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1273.272386] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c82595-7c64-4967-8eb3-73384fd31790 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.483193] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.139s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.483908] env[68569]: DEBUG nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1273.791232] env[68569]: INFO nova.compute.manager [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Took 12.67 seconds to build instance. [ 1273.989433] env[68569]: DEBUG nova.compute.utils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1273.990874] env[68569]: DEBUG nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1274.293347] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8caf5e89-be54-41d1-b583-3d13c3875138 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.184s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.394262] env[68569]: DEBUG nova.compute.manager [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Received event network-changed-6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1274.394515] env[68569]: DEBUG nova.compute.manager [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Refreshing instance network info cache due to event network-changed-6d8ce8ac-d9c6-406f-9778-83b5837c1d87. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1274.394783] env[68569]: DEBUG oslo_concurrency.lockutils [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] Acquiring lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.394969] env[68569]: DEBUG oslo_concurrency.lockutils [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] Acquired lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.395199] env[68569]: DEBUG nova.network.neutron [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Refreshing network info cache for port 6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1274.493998] env[68569]: DEBUG nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1274.516469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.516749] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.516956] env[68569]: DEBUG nova.compute.manager [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1274.518173] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7dcc1d-8da0-4597-8dae-dc1ef2cb70a3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.526417] env[68569]: DEBUG nova.compute.manager [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68569) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1274.526997] env[68569]: DEBUG nova.objects.instance [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f 
tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'flavor' on Instance uuid e4fc902a-05c1-419c-9019-c22fa0f9ae9d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.107975] env[68569]: DEBUG nova.network.neutron [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updated VIF entry in instance network info cache for port 6d8ce8ac-d9c6-406f-9778-83b5837c1d87. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.108420] env[68569]: DEBUG nova.network.neutron [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating instance_info_cache with network_info: [{"id": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "address": "fa:16:3e:d4:c4:21", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8ce8ac-d9", "ovs_interfaceid": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.502124] env[68569]: DEBUG nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1275.528746] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1275.528992] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1275.529165] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1275.529378] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1275.529528] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1275.529673] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1275.529889] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1275.530132] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1275.530305] env[68569]: DEBUG nova.virt.hardware [None 
req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1275.530467] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1275.530639] env[68569]: DEBUG nova.virt.hardware [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1275.531508] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030d4139-8c83-4116-add5-5d30c4a5bf2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.534282] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1275.534518] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ec9d57c-784d-4291-8b3b-356992264349 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.544372] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae35751-1be0-4217-a6f0-418adb441a0f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.549394] env[68569]: DEBUG oslo_vmware.api [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1275.549394] env[68569]: value = "task-3167994" [ 1275.549394] env[68569]: _type = "Task" [ 1275.549394] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.561932] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1275.567641] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Creating folder: Project (c9c6d78089974afe8c0edafdc16e4155). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1275.570816] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26b39428-e5d4-472a-9dfe-62446b836527 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.572459] env[68569]: DEBUG oslo_vmware.api [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.582109] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Created folder: Project (c9c6d78089974afe8c0edafdc16e4155) in parent group-v633430. [ 1275.582381] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Creating folder: Instances. Parent ref: group-v633753. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1275.582633] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad9f92e8-4a19-4a1c-906b-7000330a46a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.592760] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Created folder: Instances in parent group-v633753. [ 1275.592995] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1275.593232] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1275.593448] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3467d08-aa9c-4998-a877-8a435a721c5d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.610809] env[68569]: DEBUG oslo_concurrency.lockutils [req-2be039fd-a9bf-411d-85d2-2e61f8892ff3 req-4722db82-dce4-4791-a853-287e8e1b38f5 service nova] Releasing lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.611170] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1275.611170] env[68569]: value = "task-3167997" [ 1275.611170] env[68569]: _type = "Task" [ 1275.611170] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.619746] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167997, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.060099] env[68569]: DEBUG oslo_vmware.api [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3167994, 'name': PowerOffVM_Task, 'duration_secs': 0.201318} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.060362] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.060560] env[68569]: DEBUG nova.compute.manager [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1276.061340] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66afcc27-4fcc-4731-9787-37a820912fd4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.121778] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3167997, 'name': CreateVM_Task, 'duration_secs': 0.28638} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.121778] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1276.123052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.123052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.123052] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1276.123052] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b02cbe88-9255-48ac-8b42-b8f30fe68f76 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.127336] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a 
tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1276.127336] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529bcbf3-acd6-b5aa-8775-9bd702ba6c4b" [ 1276.127336] env[68569]: _type = "Task" [ 1276.127336] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.135806] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529bcbf3-acd6-b5aa-8775-9bd702ba6c4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.572927] env[68569]: DEBUG oslo_concurrency.lockutils [None req-d4bf2ed6-d533-49f9-87d0-1c963a3ddb3f tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.637680] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529bcbf3-acd6-b5aa-8775-9bd702ba6c4b, 'name': SearchDatastore_Task, 'duration_secs': 0.011344} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.638035] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.638374] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1276.638661] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.638842] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.639071] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.639393] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34abcdf4-a5b9-4262-b3aa-8fdc0cb87f60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.650824] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.650992] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1276.651776] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd608152-607b-4a45-b185-5a1f854922c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.656944] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1276.656944] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5249274b-24a1-d9f1-f63a-1ca24940c979" [ 1276.656944] env[68569]: _type = "Task" [ 1276.656944] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.664138] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5249274b-24a1-d9f1-f63a-1ca24940c979, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.935892] env[68569]: DEBUG nova.objects.instance [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'flavor' on Instance uuid e4fc902a-05c1-419c-9019-c22fa0f9ae9d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.168444] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5249274b-24a1-d9f1-f63a-1ca24940c979, 'name': SearchDatastore_Task, 'duration_secs': 0.032288} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.169237] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba7f91c-4f1d-4ac2-8aa7-7e16d97f2111 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.174302] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1277.174302] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c0a9f-7b8e-03d3-7fe2-c8d0234b53d9" [ 1277.174302] env[68569]: _type = "Task" [ 1277.174302] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.181560] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c0a9f-7b8e-03d3-7fe2-c8d0234b53d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.442192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.442369] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1277.442490] env[68569]: DEBUG nova.network.neutron [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1277.442668] env[68569]: DEBUG nova.objects.instance [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'info_cache' on Instance uuid e4fc902a-05c1-419c-9019-c22fa0f9ae9d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.684203] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529c0a9f-7b8e-03d3-7fe2-c8d0234b53d9, 'name': SearchDatastore_Task, 'duration_secs': 0.037193} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.684523] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1277.684691] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1277.684943] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31fec595-ad0d-4f2a-9e42-6b216a745105 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.692517] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1277.692517] env[68569]: value = "task-3167998" [ 1277.692517] env[68569]: _type = "Task" [ 1277.692517] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.699361] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3167998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.945817] env[68569]: DEBUG nova.objects.base [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1278.203044] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3167998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421057} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.203262] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1278.203484] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1278.203730] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9a915c5-babf-4b45-bae6-fa2b5c4747f0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.210111] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1278.210111] env[68569]: value = "task-3167999" [ 1278.210111] env[68569]: _type = "Task" [ 1278.210111] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.217506] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3167999, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.646893] env[68569]: DEBUG nova.network.neutron [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [{"id": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "address": "fa:16:3e:43:35:2a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631ce46a-8b", "ovs_interfaceid": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.720287] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3167999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062181} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.720575] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1278.721292] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac92848-7424-4f4f-b32a-08df94e46a64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.740868] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1278.741152] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6af1cb28-fce5-420e-a313-1cd6bb01130d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.760596] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1278.760596] env[68569]: value = "task-3168000" [ 1278.760596] env[68569]: _type = "Task" [ 1278.760596] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.767552] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168000, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.150162] env[68569]: DEBUG oslo_concurrency.lockutils [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.270023] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168000, 'name': ReconfigVM_Task, 'duration_secs': 0.311771} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.270263] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1279.270885] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-165862c6-795a-436b-87af-407a5d8f93a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.277693] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1279.277693] env[68569]: value = "task-3168001" [ 1279.277693] env[68569]: _type = "Task" [ 1279.277693] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.286599] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168001, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.787265] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168001, 'name': Rename_Task, 'duration_secs': 0.135994} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.787656] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1279.787809] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34a380f0-b7b7-4050-8463-bac606c6cd24 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.794657] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1279.794657] env[68569]: value = "task-3168002" [ 1279.794657] env[68569]: _type = "Task" [ 1279.794657] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.802426] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168002, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.155793] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1280.156155] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d9d5ec7-36f7-48a5-a0be-a73e3e0aec68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.163958] env[68569]: DEBUG oslo_vmware.api [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1280.163958] env[68569]: value = "task-3168003" [ 1280.163958] env[68569]: _type = "Task" [ 1280.163958] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.172017] env[68569]: DEBUG oslo_vmware.api [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168003, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.304802] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168002, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.674425] env[68569]: DEBUG oslo_vmware.api [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168003, 'name': PowerOnVM_Task, 'duration_secs': 0.414526} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.674756] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1280.675040] env[68569]: DEBUG nova.compute.manager [None req-50a511ff-5c91-4e22-bf8e-96ef83f1d1b5 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1280.675886] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17c6890-03ed-4bf7-9c0a-e1d17f68e6e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.806210] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168002, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.307296] env[68569]: DEBUG oslo_vmware.api [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168002, 'name': PowerOnVM_Task, 'duration_secs': 1.033718} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.307566] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1281.307764] env[68569]: INFO nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Took 5.81 seconds to spawn the instance on the hypervisor. [ 1281.307942] env[68569]: DEBUG nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1281.308766] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e6519d-8461-4105-9e1f-3ece23ae5de7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.715403] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88191b3-0af0-44db-958a-1130e816d765 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.722048] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Suspending the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1281.722273] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d99481e0-ae1a-45d7-9bdc-dc6877501b99 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.728582] env[68569]: DEBUG oslo_vmware.api [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1281.728582] env[68569]: value = "task-3168004" [ 1281.728582] env[68569]: _type = "Task" [ 1281.728582] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.737335] env[68569]: DEBUG oslo_vmware.api [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168004, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.825387] env[68569]: INFO nova.compute.manager [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Took 10.50 seconds to build instance. [ 1282.238816] env[68569]: DEBUG oslo_vmware.api [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168004, 'name': SuspendVM_Task} progress is 70%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.327752] env[68569]: DEBUG oslo_concurrency.lockutils [None req-8b01966a-cde1-461a-9e94-4132de8afe1a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "87acc843-9940-4887-81ff-3fba98343340" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.010s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.467760] env[68569]: INFO nova.compute.manager [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Rebuilding instance [ 1282.507802] env[68569]: DEBUG nova.compute.manager [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1282.508733] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fdd32a-5fb3-46e1-9b31-0c31095e26a3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.739362] env[68569]: DEBUG oslo_vmware.api [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168004, 'name': SuspendVM_Task, 'duration_secs': 0.999575} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.739675] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Suspended the VM {{(pid=68569) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1282.739866] env[68569]: DEBUG nova.compute.manager [None req-3bbf642c-163c-410f-a17e-4598672efbf4 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1282.740658] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a11188-0a11-4b4b-b1fb-9583aa2f50b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.522817] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1283.523208] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8fdac0ea-79e4-469a-9a5d-98173882ba9d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.535028] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1283.535028] env[68569]: value = "task-3168005" [ 1283.535028] env[68569]: _type = "Task" [ 1283.535028] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.544237] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.036316] env[68569]: INFO nova.compute.manager [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Resuming [ 1284.037063] env[68569]: DEBUG nova.objects.instance [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'flavor' on Instance uuid e4fc902a-05c1-419c-9019-c22fa0f9ae9d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1284.047395] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168005, 'name': PowerOffVM_Task, 'duration_secs': 0.123059} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.047395] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1284.048523] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1284.048820] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c422ec3-d19e-46ca-823c-c0c0c109f247 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.055627] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1284.056405] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0b5454f-9575-46c0-9212-844399bcb75f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.082655] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1284.082950] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1284.083325] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Deleting the datastore file [datastore2] 87acc843-9940-4887-81ff-3fba98343340 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1284.083407] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0223f0af-26ea-4202-b064-30ecc3d9fda8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.091657] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1284.091657] env[68569]: value = "task-3168007" [ 1284.091657] env[68569]: _type = "Task" [ 1284.091657] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.099616] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.601658] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08878} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.603038] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.603038] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1284.603038] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1285.550795] env[68569]: DEBUG oslo_concurrency.lockutils [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.550984] env[68569]: DEBUG oslo_concurrency.lockutils [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquired lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.551143] env[68569]: DEBUG nova.network.neutron [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1285.638025] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1285.638025] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.638412] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1285.638412] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.638488] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1285.638639] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1285.638846] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1285.639009] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1285.639196] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1285.639343] env[68569]: DEBUG nova.virt.hardware [None 
req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1285.639545] env[68569]: DEBUG nova.virt.hardware [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1285.640392] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b1a607-e295-4195-9663-e46ef6fa5b85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.648605] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3516c6af-2ede-4f7f-801b-1734a312edff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.661454] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1285.666813] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.667036] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1285.667231] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48be888e-12d0-455d-910f-8a24a144a7b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.682675] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1285.682675] env[68569]: value = "task-3168008" [ 1285.682675] env[68569]: _type = "Task" [ 1285.682675] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.689695] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168008, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.192093] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168008, 'name': CreateVM_Task, 'duration_secs': 0.247858} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.192250] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1286.192650] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.192817] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.193159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1286.193428] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a2666a4-ba20-4212-96e3-dce526456e28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.200020] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1286.200020] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52739a36-ca9c-00eb-9825-aedcafb84647" [ 1286.200020] env[68569]: _type = "Task" [ 1286.200020] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.206828] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52739a36-ca9c-00eb-9825-aedcafb84647, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.250669] env[68569]: DEBUG nova.network.neutron [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [{"id": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "address": "fa:16:3e:43:35:2a", "network": {"id": "f01ae337-f0c5-48f3-9fd4-0eba7ebc0f8a", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1956587258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa0ab47201c64b0d87480d4ff90014f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631ce46a-8b", "ovs_interfaceid": "631ce46a-8b09-43bc-a285-96eaf4e3bc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.712016] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52739a36-ca9c-00eb-9825-aedcafb84647, 'name': SearchDatastore_Task, 'duration_secs': 0.009329} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.712389] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.712532] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1286.712763] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.712908] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.713101] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.713358] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf749d0b-5401-43f9-9dcd-50b913366d52 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.721172] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.721344] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1286.721993] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4aff4a8d-9c13-4e7e-82e2-0b8a372f4c0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.726686] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1286.726686] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52194d6e-5251-5511-5560-19d1d02628b5" [ 1286.726686] env[68569]: _type = "Task" [ 1286.726686] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.733387] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52194d6e-5251-5511-5560-19d1d02628b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.752965] env[68569]: DEBUG oslo_concurrency.lockutils [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Releasing lock "refresh_cache-e4fc902a-05c1-419c-9019-c22fa0f9ae9d" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.753735] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6042f4f1-cb46-47b6-ae3c-d2a62a96bbb7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.759336] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Resuming the VM {{(pid=68569) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1286.759538] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59740131-6170-46ff-b1f9-5be978b845ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.765254] env[68569]: DEBUG oslo_vmware.api [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1286.765254] env[68569]: value = "task-3168009" [ 1286.765254] env[68569]: _type = "Task" [ 1286.765254] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.772079] env[68569]: DEBUG oslo_vmware.api [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168009, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.237046] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52194d6e-5251-5511-5560-19d1d02628b5, 'name': SearchDatastore_Task, 'duration_secs': 0.007573} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.237888] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4fd70ae-db01-439a-8708-50146c4535e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.244239] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1287.244239] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52551063-85c1-e840-7980-ad8e0c56e0f5" [ 1287.244239] env[68569]: _type = "Task" [ 1287.244239] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.252333] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52551063-85c1-e840-7980-ad8e0c56e0f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.274076] env[68569]: DEBUG oslo_vmware.api [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168009, 'name': PowerOnVM_Task, 'duration_secs': 0.489632} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.274357] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Resumed the VM {{(pid=68569) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1287.274546] env[68569]: DEBUG nova.compute.manager [None req-111942a7-7219-4cd3-be14-e686591d2200 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1287.275374] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a39e41-69f6-4bb9-896f-c97ddb405540 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.756837] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52551063-85c1-e840-7980-ad8e0c56e0f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010681} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.757235] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.757567] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1287.757915] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3c50672-c4ad-4478-9714-ea38e7b100b4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.765487] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1287.765487] env[68569]: value = "task-3168010" [ 1287.765487] env[68569]: _type = "Task" [ 1287.765487] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.773061] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168010, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.282831] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168010, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.777219] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168010, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608359} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.777607] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1288.777735] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1288.778020] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c328f404-9fa7-418b-8344-2ed09551b3da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.783313] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.783528] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.783725] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.783903] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.784078] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.786524] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1288.786524] env[68569]: value = "task-3168011" [ 1288.786524] env[68569]: _type = "Task" [ 1288.786524] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.786940] env[68569]: INFO nova.compute.manager [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Terminating instance [ 1288.795958] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.292961] env[68569]: DEBUG nova.compute.manager [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1289.293191] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1289.293922] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ece9b6f-9bb0-4867-a700-3a7a4fa96452 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.301482] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064715} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.303462] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1289.303756] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1289.304415] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d28383-702d-48a8-80c7-a0088d04febf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.306533] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e78fd7e-c054-4487-b9c8-5ce0a8f625c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.324918] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.326157] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8e7efc0-268b-46e5-8853-7d50c49da841 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.340112] env[68569]: DEBUG oslo_vmware.api [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1289.340112] env[68569]: value = "task-3168012" [ 1289.340112] env[68569]: _type = "Task" [ 1289.340112] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.345607] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1289.345607] env[68569]: value = "task-3168013" [ 1289.345607] env[68569]: _type = "Task" [ 1289.345607] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.348438] env[68569]: DEBUG oslo_vmware.api [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168012, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.355530] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168013, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.849524] env[68569]: DEBUG oslo_vmware.api [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168012, 'name': PowerOffVM_Task, 'duration_secs': 0.174395} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.852616] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1289.852814] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1289.853082] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b672f21-fc8a-406d-a683-cf61cbfb06cb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.858540] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168013, 'name': ReconfigVM_Task, 'duration_secs': 0.304364} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.858786] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 87acc843-9940-4887-81ff-3fba98343340/87acc843-9940-4887-81ff-3fba98343340.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1289.859355] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e56714c2-8cc9-45ed-a245-822c3127b9e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.865689] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1289.865689] env[68569]: value = "task-3168015" [ 1289.865689] env[68569]: _type = "Task" [ 1289.865689] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.872889] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168015, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.920291] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1289.920503] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1289.920683] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleting the datastore file [datastore2] e4fc902a-05c1-419c-9019-c22fa0f9ae9d {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.920944] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5b4bc87-5f1f-458f-ab2a-c78feeb53350 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.926829] env[68569]: DEBUG oslo_vmware.api [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for the task: (returnval){ [ 1289.926829] env[68569]: value = "task-3168016" [ 1289.926829] env[68569]: _type = "Task" [ 1289.926829] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.934155] env[68569]: DEBUG oslo_vmware.api [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168016, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.375113] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168015, 'name': Rename_Task, 'duration_secs': 0.145611} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.375395] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.375639] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c922309b-7548-403f-9b4c-bfad6e509b48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.382084] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1290.382084] env[68569]: value = "task-3168017" [ 1290.382084] env[68569]: _type = "Task" [ 1290.382084] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.388845] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168017, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.434991] env[68569]: DEBUG oslo_vmware.api [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Task: {'id': task-3168016, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152302} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.435252] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1290.435463] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1290.435655] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1290.435825] env[68569]: INFO nova.compute.manager [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Took 1.14 seconds to destroy the instance on the hypervisor. 
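The records above repeat one pattern: the driver invokes a vCenter method that returns a task (PowerOnVM_Task, SuspendVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ...) and then polls it through oslo.vmware until it reports completion. A minimal sketch of that pattern follows; it uses only the public oslo.vmware session API, and the host, credentials and managed-object value are placeholders, not values taken from this log.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details -- not the vCenter used in this log.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Build a moref for a VM (the 'vm-123' value is illustrative), start it,
# and block until the returned task finishes; wait_for_task() is what
# emits the "progress is N%" / "completed successfully" lines seen above.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)

The same wait loop backs every task in this trace; only the invoked method and its arguments differ.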
[ 1290.436071] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1290.436255] env[68569]: DEBUG nova.compute.manager [-] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1290.436349] env[68569]: DEBUG nova.network.neutron [-] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1290.892948] env[68569]: DEBUG oslo_vmware.api [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168017, 'name': PowerOnVM_Task, 'duration_secs': 0.398267} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.893396] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1290.893479] env[68569]: DEBUG nova.compute.manager [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1290.894196] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a5a195-a583-4bbe-bd9c-5757dd83ec6d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.095949] env[68569]: DEBUG nova.compute.manager [req-5a18378f-27c9-4f1d-9293-ded549313e1b req-9cacb6ce-bee2-455e-be03-ff5573f19e10 service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Received event network-vif-deleted-631ce46a-8b09-43bc-a285-96eaf4e3bc0c {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1291.096296] env[68569]: INFO nova.compute.manager [req-5a18378f-27c9-4f1d-9293-ded549313e1b req-9cacb6ce-bee2-455e-be03-ff5573f19e10 service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Neutron deleted interface 631ce46a-8b09-43bc-a285-96eaf4e3bc0c; detaching it from the instance and deleting it from the info cache [ 1291.096353] env[68569]: DEBUG nova.network.neutron [req-5a18378f-27c9-4f1d-9293-ded549313e1b req-9cacb6ce-bee2-455e-be03-ff5573f19e10 service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.409866] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.409866] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.409866] env[68569]: DEBUG nova.objects.instance [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1291.581360] env[68569]: DEBUG nova.network.neutron [-] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.599463] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-450d70ae-4ff5-4010-aa5a-a86702cfcdb8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.609666] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c9e695-2fcf-44cc-9cce-eb758444b77b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.636331] env[68569]: DEBUG nova.compute.manager [req-5a18378f-27c9-4f1d-9293-ded549313e1b req-9cacb6ce-bee2-455e-be03-ff5573f19e10 service nova] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Detach interface failed, port_id=631ce46a-8b09-43bc-a285-96eaf4e3bc0c, reason: Instance e4fc902a-05c1-419c-9019-c22fa0f9ae9d could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1292.083936] env[68569]: INFO nova.compute.manager [-] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Took 1.65 seconds to deallocate network for instance. 
[ 1292.248325] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "87acc843-9940-4887-81ff-3fba98343340" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.248607] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "87acc843-9940-4887-81ff-3fba98343340" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.249142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "87acc843-9940-4887-81ff-3fba98343340-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.249142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "87acc843-9940-4887-81ff-3fba98343340-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.249142] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "87acc843-9940-4887-81ff-3fba98343340-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.251131] env[68569]: INFO nova.compute.manager [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Terminating instance [ 1292.417303] env[68569]: DEBUG oslo_concurrency.lockutils [None req-6447621b-9a8c-4731-a84a-06a7e0534679 tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.590435] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.590718] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 
tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.590933] env[68569]: DEBUG nova.objects.instance [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lazy-loading 'resources' on Instance uuid e4fc902a-05c1-419c-9019-c22fa0f9ae9d {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.754783] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "refresh_cache-87acc843-9940-4887-81ff-3fba98343340" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.754939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquired lock "refresh_cache-87acc843-9940-4887-81ff-3fba98343340" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1292.755141] env[68569]: DEBUG nova.network.neutron [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1293.153265] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b027219-5a07-47da-9cad-eee741f5219d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.160983] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0245dda-a47c-48dd-977c-e84e66518021 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.190697] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de7c569-c7ce-4c3d-b40c-10cd5605db9e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.198101] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bfce06-2369-4951-bcb0-6c304d044bc0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.210874] env[68569]: DEBUG nova.compute.provider_tree [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.274558] env[68569]: DEBUG nova.network.neutron [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance cache missing network 
info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1293.330167] env[68569]: DEBUG nova.network.neutron [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.714558] env[68569]: DEBUG nova.scheduler.client.report [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1293.832402] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Releasing lock "refresh_cache-87acc843-9940-4887-81ff-3fba98343340" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.832869] env[68569]: DEBUG nova.compute.manager [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1293.833083] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1293.834018] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d11247-27fe-4815-b610-4f723a77000e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.843668] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1293.843919] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90145dbf-a42c-48e3-9226-8d3539670c02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.849948] env[68569]: DEBUG oslo_vmware.api [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1293.849948] env[68569]: value = "task-3168018" [ 1293.849948] env[68569]: _type = "Task" [ 1293.849948] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.856730] env[68569]: DEBUG oslo_vmware.api [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168018, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.219594] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.629s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.240718] env[68569]: INFO nova.scheduler.client.report [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Deleted allocations for instance e4fc902a-05c1-419c-9019-c22fa0f9ae9d [ 1294.359921] env[68569]: DEBUG oslo_vmware.api [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168018, 'name': PowerOffVM_Task, 'duration_secs': 0.104401} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.360150] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1294.360281] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1294.360517] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7402f9f8-8620-48bb-b8a1-aa27b407c473 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.384227] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1294.384457] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1294.384613] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Deleting the datastore file [datastore1] 87acc843-9940-4887-81ff-3fba98343340 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1294.384854] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b6f07c1-18a6-4a16-8832-78195ad06b55 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.391516] env[68569]: DEBUG oslo_vmware.api [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for the task: (returnval){ [ 1294.391516] env[68569]: value = "task-3168020" [ 1294.391516] env[68569]: _type = "Task" [ 1294.391516] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.398916] env[68569]: DEBUG oslo_vmware.api [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168020, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.748876] env[68569]: DEBUG oslo_concurrency.lockutils [None req-3652a6d7-20a8-4722-9831-990bfe7b71b2 tempest-ServerActionsTestJSON-1198141261 tempest-ServerActionsTestJSON-1198141261-project-member] Lock "e4fc902a-05c1-419c-9019-c22fa0f9ae9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.965s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.902317] env[68569]: DEBUG oslo_vmware.api [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Task: {'id': task-3168020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097753} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.902575] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1294.902760] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1294.902969] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1294.903366] env[68569]: INFO nova.compute.manager [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] [instance: 87acc843-9940-4887-81ff-3fba98343340] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1294.903619] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1294.903816] env[68569]: DEBUG nova.compute.manager [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1294.903909] env[68569]: DEBUG nova.network.neutron [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1294.919371] env[68569]: DEBUG nova.network.neutron [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1295.421786] env[68569]: DEBUG nova.network.neutron [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.924539] env[68569]: INFO nova.compute.manager [-] [instance: 87acc843-9940-4887-81ff-3fba98343340] Took 1.02 seconds to deallocate network for instance. [ 1296.432636] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.432939] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.433211] env[68569]: DEBUG nova.objects.instance [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lazy-loading 'resources' on Instance uuid 87acc843-9940-4887-81ff-3fba98343340 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1296.991602] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba2152f-3796-4047-823d-4f1274327d41 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.999264] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb95222d-0d2d-4711-8028-c6434c3f29eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.030082] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f9084f-9af6-4adc-83ac-c9aa5d414198 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.037337] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aaeabb-917e-41b4-a98d-ce62ac5934b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.050231] env[68569]: DEBUG nova.compute.provider_tree [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.553234] env[68569]: DEBUG nova.scheduler.client.report [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1298.061583] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.087669] env[68569]: INFO nova.scheduler.client.report [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Deleted allocations for instance 87acc843-9940-4887-81ff-3fba98343340 [ 1298.207219] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.207500] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.207650] env[68569]: INFO nova.compute.manager [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Shelving [ 1298.595174] env[68569]: DEBUG oslo_concurrency.lockutils [None req-a2ebb0ab-ff65-48ef-9177-32d720878f7a tempest-ServersListShow296Test-975303031 tempest-ServersListShow296Test-975303031-project-member] Lock "87acc843-9940-4887-81ff-3fba98343340" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.346s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.217206] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.217536] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25bdb7e7-4a8a-4ff7-8e93-1cca3507399c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.224256] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting 
for the task: (returnval){ [ 1299.224256] env[68569]: value = "task-3168021" [ 1299.224256] env[68569]: _type = "Task" [ 1299.224256] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.232447] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.655846] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "612b436d-b0ac-4bda-8248-9b354201fde0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.656132] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "612b436d-b0ac-4bda-8248-9b354201fde0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.733450] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168021, 'name': PowerOffVM_Task, 'duration_secs': 0.192514} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.733703] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.734470] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ec1833-962b-450f-8e01-e9eaf1024dc5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.751787] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23821adf-2490-4a21-9411-da3ff234e101 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.158274] env[68569]: DEBUG nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1300.261157] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1300.261418] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-aec94bb4-a54c-4403-90ae-de171a1d50f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.269223] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1300.269223] env[68569]: value = "task-3168022" [ 1300.269223] env[68569]: _type = "Task" [ 1300.269223] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.277335] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168022, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.681342] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.681627] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.683088] env[68569]: INFO nova.compute.claims [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1300.779439] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168022, 'name': CreateSnapshot_Task, 'duration_secs': 0.38504} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.779713] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1300.780502] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef5339b-6bd0-4394-9bfd-cb1884febe67 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.297615] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1301.297853] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-65998ab3-fa46-4160-93bf-3d875704ae54 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.306222] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1301.306222] env[68569]: value = "task-3168023" [ 1301.306222] env[68569]: _type = "Task" [ 1301.306222] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.313777] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168023, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.738305] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5013991a-fe59-49ff-b549-8270663702d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.747554] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cae866c-68b3-4c10-8d37-b531d0b7fce7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.777744] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb05b21-e984-48b9-92c5-e84f7266bc84 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.784827] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9722293f-a549-4947-add8-4d400118e7b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.797696] env[68569]: DEBUG nova.compute.provider_tree [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.815148] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168023, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.301376] env[68569]: DEBUG nova.scheduler.client.report [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1302.317439] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168023, 'name': CloneVM_Task, 'duration_secs': 0.888076} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.318266] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Created linked-clone VM from snapshot [ 1302.319097] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1dc931-1810-4ac9-91b4-2ec6645e5fd6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.326728] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Uploading image 148c52e9-00a5-490d-b57e-61916d114658 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1302.350488] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1302.350488] env[68569]: value = "vm-633758" [ 1302.350488] env[68569]: _type = "VirtualMachine" [ 1302.350488] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1302.350742] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-10e23503-e99d-450d-b9db-0588d36efb6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.357279] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease: (returnval){ [ 1302.357279] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aef40f-fd6f-4940-3780-0a0c27c0720e" [ 1302.357279] env[68569]: _type = "HttpNfcLease" [ 1302.357279] env[68569]: } obtained for exporting VM: (result){ [ 1302.357279] env[68569]: value = "vm-633758" [ 1302.357279] env[68569]: _type = "VirtualMachine" [ 1302.357279] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1302.357583] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the lease: (returnval){ [ 1302.357583] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aef40f-fd6f-4940-3780-0a0c27c0720e" [ 1302.357583] env[68569]: _type = "HttpNfcLease" [ 1302.357583] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1302.363321] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1302.363321] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aef40f-fd6f-4940-3780-0a0c27c0720e" [ 1302.363321] env[68569]: _type = "HttpNfcLease" [ 1302.363321] env[68569]: } is initializing. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1302.806143] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.124s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.806694] env[68569]: DEBUG nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1302.867088] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1302.867088] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aef40f-fd6f-4940-3780-0a0c27c0720e" [ 1302.867088] env[68569]: _type = "HttpNfcLease" [ 1302.867088] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1302.867380] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1302.867380] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52aef40f-fd6f-4940-3780-0a0c27c0720e" [ 1302.867380] env[68569]: _type = "HttpNfcLease" [ 1302.867380] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1302.868041] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f41b486-fcdb-4f0f-ab54-bc06a514eb5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.875123] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b6f913-1d42-3836-da2e-f519c1845498/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1302.875289] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b6f913-1d42-3836-da2e-f519c1845498/disk-0.vmdk for reading. 
{{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1302.961907] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1d0cdacc-3cd1-4e72-9e79-05e47edc21fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.311764] env[68569]: DEBUG nova.compute.utils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1303.313559] env[68569]: DEBUG nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Not allocating networking since 'none' was specified. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1303.815201] env[68569]: DEBUG nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1304.824971] env[68569]: DEBUG nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1304.848780] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1304.849062] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.849237] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1304.849530] env[68569]: DEBUG nova.virt.hardware [None 
req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.849733] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1304.849912] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1304.850189] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1304.850396] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1304.850625] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1304.850822] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1304.851077] env[68569]: DEBUG nova.virt.hardware [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1304.851990] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bfa431-dcd8-4d92-bc72-1418b4c2ac4e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.859954] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4368bfed-ef99-4260-9921-78490a8a318d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.873165] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1304.878732] env[68569]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Creating folder: Project (03ccce2025924571a56d356ef67fea85). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.879375] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4e1293f-6913-4a82-bf10-b6b155607590 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.889327] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Created folder: Project (03ccce2025924571a56d356ef67fea85) in parent group-v633430. [ 1304.889597] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Creating folder: Instances. Parent ref: group-v633759. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.889884] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfde5ff6-658a-4f15-abfd-6a878ff4fb0a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.897972] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Created folder: Instances in parent group-v633759. [ 1304.898227] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.898427] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1304.898723] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7d3c44c-ff32-40a1-8b31-320e221e0b9e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.914255] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1304.914255] env[68569]: value = "task-3168027" [ 1304.914255] env[68569]: _type = "Task" [ 1304.914255] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.921440] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168027, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.423971] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168027, 'name': CreateVM_Task, 'duration_secs': 0.265332} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.424190] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1305.424624] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.424807] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.425134] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1305.425393] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66206896-b52a-436d-b537-c300922f2c90 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.429810] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1305.429810] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dbf726-ca6d-c63a-1d31-3ab6a02edaee" [ 1305.429810] env[68569]: _type = "Task" [ 1305.429810] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.437306] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dbf726-ca6d-c63a-1d31-3ab6a02edaee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.940556] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52dbf726-ca6d-c63a-1d31-3ab6a02edaee, 'name': SearchDatastore_Task, 'duration_secs': 0.010118} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.940930] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.941129] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1305.941375] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.941526] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.941713] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1305.941982] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e344b3d-c5f0-44b3-ae76-f82538474f02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.949975] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1305.950170] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1305.950872] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc2aac9c-8b52-4a58-81c3-3636342afc18 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.955891] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1305.955891] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529122f1-d3f8-72f4-9b8b-43f43b039dff" [ 1305.955891] env[68569]: _type = "Task" [ 1305.955891] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.963097] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529122f1-d3f8-72f4-9b8b-43f43b039dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.467849] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529122f1-d3f8-72f4-9b8b-43f43b039dff, 'name': SearchDatastore_Task, 'duration_secs': 0.008732} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.468664] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-901cb465-8f76-4673-b432-c58e31de7479 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.474134] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1306.474134] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52755b18-1b13-af70-817e-63feeca4ee8f" [ 1306.474134] env[68569]: _type = "Task" [ 1306.474134] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.481599] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52755b18-1b13-af70-817e-63feeca4ee8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.984603] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52755b18-1b13-af70-817e-63feeca4ee8f, 'name': SearchDatastore_Task, 'duration_secs': 0.012493} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.984983] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.985231] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1306.985531] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57ebee6c-e9d5-42f5-962e-38b6fe181ab6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.992937] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1306.992937] env[68569]: value = "task-3168028" [ 1306.992937] env[68569]: _type = "Task" [ 1306.992937] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.000716] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168028, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.502859] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480926} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.503131] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1307.503350] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1307.503605] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd24f120-3267-45da-bb33-c1713b0875ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.509474] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1307.509474] env[68569]: value = "task-3168029" [ 1307.509474] env[68569]: _type = "Task" [ 1307.509474] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.519886] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.019897] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097892} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.020292] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.020988] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e796914-9df0-4248-a324-d4ce04499084 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.040685] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.041015] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fd942e7-0981-44bf-9ce6-92c611edda33 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.060553] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1308.060553] env[68569]: value = "task-3168030" [ 1308.060553] env[68569]: _type = "Task" [ 1308.060553] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.068495] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168030, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.570554] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168030, 'name': ReconfigVM_Task, 'duration_secs': 0.277445} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.570899] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1308.571584] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c7d629b-5a36-4521-89b1-fe0e6dcd8c69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.577568] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1308.577568] env[68569]: value = "task-3168031" [ 1308.577568] env[68569]: _type = "Task" [ 1308.577568] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.584971] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168031, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.087526] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168031, 'name': Rename_Task, 'duration_secs': 0.137985} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.087868] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1309.088057] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b97066a-f978-4def-8cb4-15863021b992 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.094443] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1309.094443] env[68569]: value = "task-3168032" [ 1309.094443] env[68569]: _type = "Task" [ 1309.094443] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.099279] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.102665] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.604624] env[68569]: DEBUG oslo_vmware.api [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168032, 'name': PowerOnVM_Task, 'duration_secs': 0.447621} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.604927] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.605124] env[68569]: INFO nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Took 4.78 seconds to spawn the instance on the hypervisor. [ 1309.605373] env[68569]: DEBUG nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1309.606155] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546882f9-ae7c-4a6e-8584-beeb9e4b0451 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.775441] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b6f913-1d42-3836-da2e-f519c1845498/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1309.776463] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2407ecf5-b4f7-4aeb-aad8-859a044e15ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.783159] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b6f913-1d42-3836-da2e-f519c1845498/disk-0.vmdk is in state: ready. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1309.783274] env[68569]: ERROR oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b6f913-1d42-3836-da2e-f519c1845498/disk-0.vmdk due to incomplete transfer. [ 1309.783494] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0f8c260a-d94b-416c-9b3d-13962e128649 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.791431] env[68569]: DEBUG oslo_vmware.rw_handles [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b6f913-1d42-3836-da2e-f519c1845498/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1309.791623] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Uploaded image 148c52e9-00a5-490d-b57e-61916d114658 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1309.793785] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1309.794054] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1c1ac419-86fe-4b43-bee1-4b4f20c442eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.799790] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1309.799790] env[68569]: value = "task-3168033" [ 1309.799790] env[68569]: _type = "Task" [ 1309.799790] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.808338] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168033, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.122181] env[68569]: INFO nova.compute.manager [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Took 9.46 seconds to build instance. 
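The CreateVM_Task / CopyVirtualDisk_Task / ExtendVirtualDisk_Task / ReconfigVM_Task / Rename_Task / PowerOnVM_Task sequence above is the usual VMware spawn path, and the repeated "Waiting for the task" / "progress is N%" pairs come from oslo.vmware task polling. A minimal illustrative sketch of that polling pattern (not nova source; `session` is assumed to be an existing oslo_vmware VMwareAPISession and `vm_ref` an assumed VM managed-object reference):

def power_on_and_wait(session, vm_ref):
    # Start the vCenter task; invoke_api issues the SOAP call that the log
    # records as "Invoking VirtualMachine.PowerOnVM_Task with opID=...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task state, producing the periodic
    # "progress is N%" DEBUG lines, and returns the task info once the
    # task reaches "success" (raising if it ends in "error").
    return session.wait_for_task(task)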
[ 1310.214699] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "1055b531-4bca-4245-ab61-698c21b9e484" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.215017] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.310102] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168033, 'name': Destroy_Task} progress is 33%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.368512] env[68569]: INFO nova.compute.manager [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Rebuilding instance [ 1310.406382] env[68569]: DEBUG nova.compute.manager [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1310.407293] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db77e60-0218-4486-b048-9d81f0cbf67d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.597382] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.624184] env[68569]: DEBUG oslo_concurrency.lockutils [None req-097e982b-c5b6-4850-9ebd-a547d44ad9cf tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "612b436d-b0ac-4bda-8248-9b354201fde0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.968s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.720061] env[68569]: DEBUG nova.compute.utils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1310.809908] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168033, 'name': Destroy_Task, 'duration_secs': 0.583482} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.810186] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Destroyed the VM [ 1310.810422] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1310.810668] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8b4904ac-19ca-41ce-956e-05c0851b7c13 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.816176] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1310.816176] env[68569]: value = "task-3168034" [ 1310.816176] env[68569]: _type = "Task" [ 1310.816176] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.823714] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168034, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.222619] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.326195] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168034, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.419808] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1311.419808] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5b4bd69-1c5b-46e7-b3ef-1e6ad2f350e8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.427463] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1311.427463] env[68569]: value = "task-3168035" [ 1311.427463] env[68569]: _type = "Task" [ 1311.427463] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.435618] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.597567] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.597761] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.826738] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168034, 'name': RemoveSnapshot_Task, 'duration_secs': 0.7305} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.827049] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1311.827317] env[68569]: DEBUG nova.compute.manager [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1311.828069] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88000b08-e1d8-4393-b19d-5faddb7c00a0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.937458] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168035, 'name': PowerOffVM_Task, 'duration_secs': 0.121921} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.937662] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1311.937893] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1311.938642] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca74c97-ac6a-4b78-902f-03e8b1dfd36e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.944735] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1311.944951] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-132af867-b01a-4c21-9372-931320f621b5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.969122] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1311.969335] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f 
tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1311.969513] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Deleting the datastore file [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1311.969747] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0cc128e-2bb0-45ba-b9c5-d6a669454fec {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.975836] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1311.975836] env[68569]: value = "task-3168037" [ 1311.975836] env[68569]: _type = "Task" [ 1311.975836] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.982962] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168037, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.282624] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "1055b531-4bca-4245-ab61-698c21b9e484" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.282906] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.283142] env[68569]: INFO nova.compute.manager [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Attaching volume 2ac71f6c-0140-45ab-b458-462aa72f76ff to /dev/sdb [ 1312.313286] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437bf83a-3a07-49d8-a384-b279f0023dfc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.320605] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccaa3724-ad40-45c8-8aa6-a8e4a3b83d31 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.333564] env[68569]: DEBUG nova.virt.block_device [None 
req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating existing volume attachment record: 4e6c6338-109e-4d19-b5ac-6319db6a0209 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1312.339403] env[68569]: INFO nova.compute.manager [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Shelve offloading [ 1312.486599] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211795} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.486599] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1312.486599] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1312.486599] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1312.597193] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.843981] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1312.844338] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-425f4686-a474-40a5-b1ec-fdef64abe805 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.850846] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1312.850846] env[68569]: value = "task-3168041" [ 1312.850846] env[68569]: _type = "Task" [ 1312.850846] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.858492] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.361096] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1313.361388] env[68569]: DEBUG nova.compute.manager [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1313.362189] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270cf7fb-0ff7-4295-ad37-ee324b16303a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.367599] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.367765] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.367934] env[68569]: DEBUG nova.network.neutron [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1313.520675] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1313.520929] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.521097] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1313.521293] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.521431] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1313.521574] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1313.521795] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1313.521973] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1313.522150] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1313.522306] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1313.522468] env[68569]: DEBUG nova.virt.hardware [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f 
tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1313.523366] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ea0656-3c57-4099-84a7-d5bcd7b98ea7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.530672] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2cdd26-8d19-41f3-9865-b283ef7e8b26 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.543416] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance VIF info [] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1313.548833] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1313.549061] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1313.549251] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-459ec402-0f3e-40df-9a23-d4d1a3e672e9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.564136] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1313.564136] env[68569]: value = "task-3168042" [ 1313.564136] env[68569]: _type = "Task" [ 1313.564136] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.570924] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168042, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.596818] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.068806] env[68569]: DEBUG nova.network.neutron [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.075164] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168042, 'name': CreateVM_Task, 'duration_secs': 0.252842} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.075310] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1314.075683] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.075839] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.076159] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1314.076411] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31c0aff9-3a98-4462-a43a-cb87821aea68 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.081010] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1314.081010] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a9d12d-6f9d-8264-288b-e6f4cd71d185" [ 1314.081010] env[68569]: _type = "Task" [ 1314.081010] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.088583] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a9d12d-6f9d-8264-288b-e6f4cd71d185, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.571389] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.590556] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a9d12d-6f9d-8264-288b-e6f4cd71d185, 'name': SearchDatastore_Task, 'duration_secs': 0.009353} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.590829] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.591068] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1314.591301] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.591450] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.591626] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1314.591870] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-132d32ef-7e38-443b-816b-7beca352e32d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.597409] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.601103] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1314.601276] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Folder [datastore1] devstack-image-cache_base created. 
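The Acquiring/Acquired/Releasing lock records around the devstack-image-cache_base path are oslo.concurrency at work: only one worker may populate or consume a given cached image at a time. A minimal sketch of that guard, using lockutils.lock with the lock name taken from the records above and an illustrative fetch callable:

# Sketch of the lock pattern behind the "Acquiring lock" / "Acquired lock" /
# "Releasing lock" records for the image-cache path above. lockutils.lock()
# is oslo.concurrency's context manager; fetch_to_cache is illustrative.
from oslo_concurrency import lockutils

CACHE_IMAGE_LOCK = ("[datastore1] devstack-image-cache_base/"
                    "cfcf6154-fe87-45d3-9aaf-2d3604c95629")


def fetch_image_if_missing(fetch_to_cache):
    # Serialize workers spawning from the same cached image: only one copy
    # of the VMDK lands in the cache, the others wait and then reuse it.
    with lockutils.lock(CACHE_IMAGE_LOCK):
        return fetch_to_cache()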
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1314.602030] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6026dedf-ef52-4e52-92ce-ee2aa79f0ddb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.607412] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1314.607412] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eb4bbe-b83f-3c67-1173-51ea0d09d770" [ 1314.607412] env[68569]: _type = "Task" [ 1314.607412] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.615807] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eb4bbe-b83f-3c67-1173-51ea0d09d770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.770398] env[68569]: DEBUG nova.compute.manager [req-0228d5cb-e2ca-42a1-84c4-a9daf3fd2148 req-bad43bfa-7e24-40b7-937d-58ab2562591a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-vif-unplugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1314.770624] env[68569]: DEBUG oslo_concurrency.lockutils [req-0228d5cb-e2ca-42a1-84c4-a9daf3fd2148 req-bad43bfa-7e24-40b7-937d-58ab2562591a service nova] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.770791] env[68569]: DEBUG oslo_concurrency.lockutils [req-0228d5cb-e2ca-42a1-84c4-a9daf3fd2148 req-bad43bfa-7e24-40b7-937d-58ab2562591a service nova] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.770951] env[68569]: DEBUG oslo_concurrency.lockutils [req-0228d5cb-e2ca-42a1-84c4-a9daf3fd2148 req-bad43bfa-7e24-40b7-937d-58ab2562591a service nova] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.771165] env[68569]: DEBUG nova.compute.manager [req-0228d5cb-e2ca-42a1-84c4-a9daf3fd2148 req-bad43bfa-7e24-40b7-937d-58ab2562591a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] No waiting events found dispatching network-vif-unplugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1314.771342] env[68569]: WARNING nova.compute.manager [req-0228d5cb-e2ca-42a1-84c4-a9daf3fd2148 req-bad43bfa-7e24-40b7-937d-58ab2562591a service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received unexpected event 
network-vif-unplugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a for instance with vm_state shelved and task_state shelving_offloading. [ 1314.862205] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1314.863095] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f78a21e-0a31-4842-ac63-d515b605b1ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.870190] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1314.870412] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29575111-e54b-403a-b723-d2d6d7c94b8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.926814] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1314.927067] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1314.927185] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleting the datastore file [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1314.927438] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14a3cf2f-b72f-4709-a579-2decc4d8fcd2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.934602] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1314.934602] env[68569]: value = "task-3168045" [ 1314.934602] env[68569]: _type = "Task" [ 1314.934602] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.941855] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168045, 'name': DeleteDatastoreFile_Task} progress is 0%. 
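The teardown of d57645fc-82d5-4ae8-93c3-0de095a66649 traced just above (shelve offload) runs in two visible steps: UnregisterVM drops the VM from vCenter inventory, then FileManager.DeleteDatastoreFile_Task removes its directory on datastore1 and is polled like any other task. A hedged sketch of that sequence; session.invoke is a hypothetical wrapper, not the driver's real call:

# Sketch of the shelve-offload teardown traced above: unregister the VM,
# then delete its datastore directory and wait for the delete task.
# `session.invoke` / `session.wait_for_task` are hypothetical wrappers around
# the oslo.vmware calls made by nova.virt.vmwareapi.vmops and ds_util.
def destroy_instance(session, vm_ref, datastore_dir):
    # Step 1: remove the VM from vCenter inventory (UnregisterVM above).
    session.invoke("UnregisterVM", vm_ref)
    # Step 2: delete the instance's files (FileManager.DeleteDatastoreFile_Task),
    # polling the returned task the same way as the sketch further up.
    task = session.invoke("DeleteDatastoreFile_Task", datastore_dir)
    session.wait_for_task(task)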
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.103050] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.103050] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.103264] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.103395] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1315.104216] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcc4ba1-100e-463d-827c-e2e3a7f0c935 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.114722] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f415ae2-cbad-4b10-a51d-5c412586debd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.123179] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52eb4bbe-b83f-3c67-1173-51ea0d09d770, 'name': SearchDatastore_Task, 'duration_secs': 0.009375} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.124238] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64ddee01-18e4-464c-8f22-dfd12b659c7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.134962] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43d56a6-1f07-458c-b816-848d8c3a155d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.138205] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1315.138205] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f03745-dc24-3ddc-f3e9-4cb937a66338" [ 1315.138205] env[68569]: _type = "Task" [ 1315.138205] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.143838] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd5e7da-9f89-45b2-9c0a-0e2a12012873 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.149718] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f03745-dc24-3ddc-f3e9-4cb937a66338, 'name': SearchDatastore_Task, 'duration_secs': 0.009242} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.150258] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.150528] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1315.150752] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d31948e-ea68-4e9f-8a88-032a00c4b978 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.175327] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180465MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1315.175465] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.175654] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.183422] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1315.183422] env[68569]: value = "task-3168046" [ 1315.183422] env[68569]: _type = "Task" [ 1315.183422] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.190682] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.444277] env[68569]: DEBUG oslo_vmware.api [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129623} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.444493] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1315.444672] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1315.444850] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1315.468669] env[68569]: INFO nova.scheduler.client.report [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted allocations for instance d57645fc-82d5-4ae8-93c3-0de095a66649 [ 1315.691783] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453434} completed successfully. 
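For 612b436d-b0ac-4bda-8248-9b354201fde0 the spawn path reuses the cached image: SearchDatastore_Task confirms the cached VMDK exists, CopyVirtualDisk_Task clones it into the instance directory, and ExtendVirtualDisk_Task (just below) grows the root disk to the flavor size, the 1048576 there being KB for a 1 GB root disk. A hedged outline of those two datastore steps, with hypothetical helpers standing in for the driver's vm_util calls:

# Outline of the copy-then-extend steps in this stretch (CopyVirtualDisk_Task
# followed by ExtendVirtualDisk_Task). The helpers are hypothetical stand-ins
# for the oslo.vmware invocations made by nova.virt.vmwareapi.vm_util.
def clone_root_disk(session, cached_vmdk, instance_vmdk, root_size_kb):
    # Copy the cached image VMDK into the instance's directory.
    copy_task = session.invoke("CopyVirtualDisk_Task",
                               source=cached_vmdk, dest=instance_vmdk)
    session.wait_for_task(copy_task)
    # Grow the copied disk to the flavor's root size (1048576 KB = 1 GB here).
    extend_task = session.invoke("ExtendVirtualDisk_Task",
                                 name=instance_vmdk,
                                 new_capacity_kb=root_size_kb)
    session.wait_for_task(extend_task)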
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.692555] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1315.692765] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1315.692997] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abeb7e3c-04e1-4efb-b866-5e798db2d361 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.698828] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1315.698828] env[68569]: value = "task-3168047" [ 1315.698828] env[68569]: _type = "Task" [ 1315.698828] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.705879] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168047, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.974330] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.198117] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 1055b531-4bca-4245-ab61-698c21b9e484 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.198279] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 612b436d-b0ac-4bda-8248-9b354201fde0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.198455] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1316.198595] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1316.209384] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09879} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.209644] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1316.210408] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fca997b-3b41-43d0-82b1-3f68a84ec59c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.230278] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1316.232566] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6677abe-c1e3-4737-a81a-db2c7888387e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.251695] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1316.251695] env[68569]: value = "task-3168048" [ 1316.251695] env[68569]: _type = "Task" [ 1316.251695] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.255652] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcee6568-3d5c-40f0-8f92-9ebdde116ed0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.262631] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168048, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.265066] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b3705e-e457-417f-8328-18d6651b25df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.293957] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05282433-cd6e-43b7-b299-a906fcfb02fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.301651] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174c5b58-124b-4f47-a99a-432b22ae4768 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.316924] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.761399] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168048, 'name': ReconfigVM_Task, 'duration_secs': 0.257877} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.761739] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0/612b436d-b0ac-4bda-8248-9b354201fde0.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1316.762283] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc97f8ba-ee72-4d50-a27f-c5e8383eafc6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.768539] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1316.768539] env[68569]: value = "task-3168049" [ 1316.768539] env[68569]: _type = "Task" [ 1316.768539] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.776230] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168049, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.795737] env[68569]: DEBUG nova.compute.manager [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1316.796087] env[68569]: DEBUG nova.compute.manager [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing instance network info cache due to event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1316.796411] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.796587] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.796658] env[68569]: DEBUG nova.network.neutron [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1316.819776] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1316.876610] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1316.876866] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633763', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'name': 'volume-2ac71f6c-0140-45ab-b458-462aa72f76ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1055b531-4bca-4245-ab61-698c21b9e484', 'attached_at': '', 'detached_at': '', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'serial': '2ac71f6c-0140-45ab-b458-462aa72f76ff'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1316.878095] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38421302-6d6c-4f1b-99c9-135d1e351199 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.893844] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4c21ed-8035-4ad2-b1d8-0b951d269bcb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.917717] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-2ac71f6c-0140-45ab-b458-462aa72f76ff/volume-2ac71f6c-0140-45ab-b458-462aa72f76ff.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1316.917969] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03c3f1d3-7356-43d8-b3c5-71d49f46bb80 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.935088] env[68569]: DEBUG oslo_vmware.api [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1316.935088] env[68569]: value = "task-3168050" [ 1316.935088] env[68569]: _type = "Task" [ 1316.935088] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.942326] env[68569]: DEBUG oslo_vmware.api [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168050, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.278491] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168049, 'name': Rename_Task} progress is 99%. 
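The Volume attach records above carry the Cinder connection_info the driver consumes: driver_volume_type 'vmdk', the backing volume's managed object 'vm-633763', the volume_id, and the access mode. A small sketch of pulling those fields out ahead of the ReconfigVM_Task that attaches the disk; the dict fields are copied from the record above, while the helper around them is purely illustrative:

# Reading the connection_info shown in the _attach_volume_vmdk record above.
# The dict fields are taken from that record; attach_vmdk_volume itself is an
# illustrative helper, not nova's real signature.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-633763",
        "volume_id": "2ac71f6c-0140-45ab-b458-462aa72f76ff",
        "name": "volume-2ac71f6c-0140-45ab-b458-462aa72f76ff",
        "access_mode": "rw",
        "encrypted": False,
    },
}


def attach_vmdk_volume(info):
    assert info["driver_volume_type"] == "vmdk"
    data = info["data"]
    # The backing disk lives on a shadow VM ("vm-633763"); the driver then
    # reconfigures the instance (ReconfigVM_Task above) to attach that VMDK.
    return data["volume"], data["volume_id"], data["access_mode"]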
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.325177] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1317.325422] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.150s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.325851] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.352s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.326011] env[68569]: DEBUG nova.objects.instance [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'resources' on Instance uuid d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.446671] env[68569]: DEBUG oslo_vmware.api [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168050, 'name': ReconfigVM_Task, 'duration_secs': 0.350329} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.446936] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-2ac71f6c-0140-45ab-b458-462aa72f76ff/volume-2ac71f6c-0140-45ab-b458-462aa72f76ff.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1317.451566] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dba24e96-752f-41af-a1de-dac1126824b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.465569] env[68569]: DEBUG oslo_vmware.api [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1317.465569] env[68569]: value = "task-3168051" [ 1317.465569] env[68569]: _type = "Task" [ 1317.465569] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.472915] env[68569]: DEBUG oslo_vmware.api [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168051, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.502420] env[68569]: DEBUG nova.network.neutron [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updated VIF entry in instance network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1317.502766] env[68569]: DEBUG nova.network.neutron [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap224a9b72-1f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.779964] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168049, 'name': Rename_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.831031] env[68569]: DEBUG nova.objects.instance [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'numa_topology' on Instance uuid d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.975464] env[68569]: DEBUG oslo_vmware.api [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168051, 'name': ReconfigVM_Task, 'duration_secs': 0.146543} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.975794] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633763', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'name': 'volume-2ac71f6c-0140-45ab-b458-462aa72f76ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1055b531-4bca-4245-ab61-698c21b9e484', 'attached_at': '', 'detached_at': '', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'serial': '2ac71f6c-0140-45ab-b458-462aa72f76ff'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1318.005936] env[68569]: DEBUG oslo_concurrency.lockutils [req-cfe240bc-11b6-43dd-900d-669fb6cc6a0b req-01f80ca3-d5bf-46d0-a97d-899aa165c785 service nova] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.280592] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168049, 'name': Rename_Task, 'duration_secs': 1.130809} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.281475] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1318.281475] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f30a0385-f241-4583-96aa-5f95c5d9ca70 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.288199] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1318.288199] env[68569]: value = "task-3168052" [ 1318.288199] env[68569]: _type = "Task" [ 1318.288199] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.295369] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168052, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.327081] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1318.327260] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1318.332975] env[68569]: DEBUG nova.objects.base [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1318.381400] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecde3436-25cc-485e-9fb3-1a6dbe9eaa9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.390117] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce28ba9-fcb3-4d41-a1de-fe81503ec074 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.425770] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afd17d3-fb9a-4f8f-83d7-936e561dca85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.433819] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bdda3a-8a16-4b7b-a2a2-9b638adb1c3c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.448153] env[68569]: DEBUG nova.compute.provider_tree [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.798365] env[68569]: DEBUG oslo_vmware.api [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168052, 'name': PowerOnVM_Task, 'duration_secs': 0.415012} completed successfully. 
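The "Running periodic task" records (_poll_unconfirmed_resizes, update_available_resource, _reclaim_queued_deletes) come from oslo.service's periodic-task machinery; _reclaim_queued_deletes bails out immediately here because CONF.reclaim_instance_interval <= 0. A hedged sketch of that shape, using the real periodic_task decorator but an illustrative manager class and config lookup:

# Sketch of the periodic-task shape behind the "Running periodic task" and
# "CONF.reclaim_instance_interval <= 0, skipping" records above. The decorator
# is oslo.service's; the manager class and config access are illustrative.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class IllustrativeComputeManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task
    def _reclaim_queued_deletes(self, context):
        interval = getattr(CONF, "reclaim_instance_interval", 0)
        if interval <= 0:
            # Matches the "skipping..." DEBUG record above.
            return
        # ...otherwise reclaim instances soft-deleted longer than `interval`.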
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.798663] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.798849] env[68569]: DEBUG nova.compute.manager [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1318.799625] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe80e11b-77b8-4905-af9f-ac1ddac92979 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.897338] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.951122] env[68569]: DEBUG nova.scheduler.client.report [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.009703] env[68569]: DEBUG nova.objects.instance [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'flavor' on Instance uuid 1055b531-4bca-4245-ab61-698c21b9e484 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1319.314874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.456014] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.458490] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.144s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.458674] env[68569]: DEBUG nova.objects.instance [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68569) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1319.514290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7dfdfe88-1811-45ca-af5a-deed7855f6a3 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.231s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.969717] env[68569]: DEBUG oslo_concurrency.lockutils [None req-9b697b61-6768-4d7e-a9e1-2fc0f3b2d287 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.762s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.970582] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.073s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.970763] env[68569]: INFO nova.compute.manager [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Unshelving [ 1320.091956] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "612b436d-b0ac-4bda-8248-9b354201fde0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.092236] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "612b436d-b0ac-4bda-8248-9b354201fde0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.092442] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock 
"612b436d-b0ac-4bda-8248-9b354201fde0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.092619] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "612b436d-b0ac-4bda-8248-9b354201fde0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.092779] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "612b436d-b0ac-4bda-8248-9b354201fde0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.094815] env[68569]: INFO nova.compute.manager [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Terminating instance [ 1320.457828] env[68569]: DEBUG nova.compute.manager [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Stashing vm_state: active {{(pid=68569) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1320.466851] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7a911609-96a3-4d89-9e7f-d8270e32bb6f tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.598674] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "refresh_cache-612b436d-b0ac-4bda-8248-9b354201fde0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.598895] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquired lock "refresh_cache-612b436d-b0ac-4bda-8248-9b354201fde0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.599104] env[68569]: DEBUG nova.network.neutron [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1320.978912] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.979239] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.993736] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.117809] env[68569]: DEBUG nova.network.neutron [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1321.169235] env[68569]: DEBUG nova.network.neutron [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.486936] env[68569]: INFO nova.compute.claims [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1321.672382] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Releasing lock "refresh_cache-612b436d-b0ac-4bda-8248-9b354201fde0" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1321.672774] env[68569]: DEBUG nova.compute.manager [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Start destroying the instance on the hypervisor. 
{{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1321.672977] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1321.673923] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9e11fd-4cfe-4820-b38b-a1e75fc8e034 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.682085] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1321.682327] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47ec5bc7-a236-4716-a420-d66b0dce427b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.688417] env[68569]: DEBUG oslo_vmware.api [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1321.688417] env[68569]: value = "task-3168053" [ 1321.688417] env[68569]: _type = "Task" [ 1321.688417] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.696740] env[68569]: DEBUG oslo_vmware.api [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168053, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.992875] env[68569]: INFO nova.compute.resource_tracker [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating resource usage from migration daf8ddce-1d1e-43d8-a39b-f7956ec35687 [ 1322.049740] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c017fb-e2a2-4a9b-a97a-29277a681f9a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.057089] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2a20fd-a813-465e-a11d-4d83d17ee7a7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.086506] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f878e0-5516-41f3-958f-3d0b62812863 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.092861] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50094552-b63c-4858-add6-ab1fc792129e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.105160] env[68569]: DEBUG nova.compute.provider_tree [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.197874] env[68569]: DEBUG oslo_vmware.api [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168053, 'name': PowerOffVM_Task, 'duration_secs': 0.150981} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.198136] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1322.198304] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1322.198543] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8fd62881-b9cc-4633-b0c9-d6ba3be1adf8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.225169] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1322.225392] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1322.225576] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Deleting the datastore file [datastore1] 612b436d-b0ac-4bda-8248-9b354201fde0 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1322.225822] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d72fad15-f6d1-413c-bfea-c237678a3bd4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.231364] env[68569]: DEBUG oslo_vmware.api [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for the task: (returnval){ [ 1322.231364] env[68569]: value = "task-3168055" [ 1322.231364] env[68569]: _type = "Task" [ 1322.231364] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.238591] env[68569]: DEBUG oslo_vmware.api [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168055, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.608010] env[68569]: DEBUG nova.scheduler.client.report [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.741381] env[68569]: DEBUG oslo_vmware.api [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Task: {'id': task-3168055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106579} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.741633] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1322.741819] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1322.741997] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1322.742187] env[68569]: INFO nova.compute.manager [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1322.742423] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1322.742606] env[68569]: DEBUG nova.compute.manager [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1322.742699] env[68569]: DEBUG nova.network.neutron [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1322.757442] env[68569]: DEBUG nova.network.neutron [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1323.113062] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.134s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.113473] env[68569]: INFO nova.compute.manager [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Migrating [ 1323.119354] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.126s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.119558] env[68569]: DEBUG nova.objects.instance [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'pci_requests' on Instance uuid d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1323.260066] env[68569]: DEBUG nova.network.neutron [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.629960] env[68569]: DEBUG nova.objects.instance [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'numa_topology' on Instance uuid d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1323.631277] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.631453] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" 
{{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.631681] env[68569]: DEBUG nova.network.neutron [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1323.762547] env[68569]: INFO nova.compute.manager [-] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Took 1.02 seconds to deallocate network for instance. [ 1324.133902] env[68569]: INFO nova.compute.claims [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1324.268620] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.334547] env[68569]: DEBUG nova.network.neutron [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating instance_info_cache with network_info: [{"id": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "address": "fa:16:3e:d4:c4:21", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8ce8ac-d9", "ovs_interfaceid": "6d8ce8ac-d9c6-406f-9778-83b5837c1d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.837328] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-1055b531-4bca-4245-ab61-698c21b9e484" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.207465] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8c728311-a804-44a5-9e18-4473e198346f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.214908] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05143ef-4e6f-47de-86cd-7cf30a4ade1c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.244890] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0b41f5-f3c0-4e8c-8707-1ebf6947adae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.252430] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588115da-ac94-4534-afd5-fb268cad098f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.265864] env[68569]: DEBUG nova.compute.provider_tree [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.769238] env[68569]: DEBUG nova.scheduler.client.report [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1326.274244] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.155s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.277472] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.009s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.277791] env[68569]: DEBUG nova.objects.instance [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lazy-loading 'resources' on Instance uuid 612b436d-b0ac-4bda-8248-9b354201fde0 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1326.304637] env[68569]: INFO nova.network.neutron [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 
d57645fc-82d5-4ae8-93c3-0de095a66649] Updating port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1326.836479] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836e46ea-09f0-42b9-ba96-8628cc68649f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.843759] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaaacd2-2274-4c79-8b51-07a672fd427c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.874052] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67077a47-cc7c-488d-bc4e-51dba01a431e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.880865] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf969343-a167-4769-98dc-2cd495a3d52c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.895100] env[68569]: DEBUG nova.compute.provider_tree [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.377892] env[68569]: ERROR nova.compute.manager [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Setting instance vm_state to ERROR: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Traceback (most recent call last): [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] File "/opt/stack/nova/nova/compute/manager.py", line 11478, in _error_out_instance_on_exception [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] yield [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] disk_info = self.driver.migrate_disk_and_power_off( [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given [ 1327.377892] env[68569]: ERROR nova.compute.manager [instance: 1055b531-4bca-4245-ab61-698c21b9e484] [ 1327.398062] env[68569]: DEBUG nova.scheduler.client.report [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1327.653382] env[68569]: DEBUG nova.compute.manager [req-a1465c84-c83a-4233-ab37-44447eff6f3e req-66fccf6f-ae8c-4947-a36d-49ea45ae9502 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-vif-plugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1327.653609] env[68569]: DEBUG oslo_concurrency.lockutils [req-a1465c84-c83a-4233-ab37-44447eff6f3e req-66fccf6f-ae8c-4947-a36d-49ea45ae9502 service nova] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.653812] env[68569]: DEBUG oslo_concurrency.lockutils [req-a1465c84-c83a-4233-ab37-44447eff6f3e req-66fccf6f-ae8c-4947-a36d-49ea45ae9502 service nova] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.653976] env[68569]: DEBUG oslo_concurrency.lockutils [req-a1465c84-c83a-4233-ab37-44447eff6f3e req-66fccf6f-ae8c-4947-a36d-49ea45ae9502 service nova] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.654159] env[68569]: DEBUG nova.compute.manager [req-a1465c84-c83a-4233-ab37-44447eff6f3e req-66fccf6f-ae8c-4947-a36d-49ea45ae9502 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] No waiting events found dispatching network-vif-plugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1327.654324] env[68569]: WARNING nova.compute.manager [req-a1465c84-c83a-4233-ab37-44447eff6f3e req-66fccf6f-ae8c-4947-a36d-49ea45ae9502 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received unexpected event network-vif-plugged-224a9b72-1fe5-455e-8f12-0ba6f5dd104a for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1327.776228] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.776228] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.776228] env[68569]: DEBUG nova.network.neutron [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1327.896868] env[68569]: INFO nova.compute.manager [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Swapping old allocation on dict_keys(['a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6']) held by migration daf8ddce-1d1e-43d8-a39b-f7956ec35687 for instance [ 1327.903910] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.919563] env[68569]: INFO nova.scheduler.client.report [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Deleted allocations for instance 612b436d-b0ac-4bda-8248-9b354201fde0 [ 1327.921500] env[68569]: DEBUG nova.scheduler.client.report [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Overwriting current allocation {'allocations': {'a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 191}}, 'project_id': '335f83fe0b8e42aa80e8f0691b609649', 'user_id': '4b8671c22fa042d28350b219ac52d775', 'consumer_generation': 1} on consumer 1055b531-4bca-4245-ab61-698c21b9e484 {{(pid=68569) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1328.429290] env[68569]: DEBUG oslo_concurrency.lockutils [None req-0af4cfee-2b70-44d7-b70d-bdf7cc39990d tempest-ServerShowV257Test-601325932 tempest-ServerShowV257Test-601325932-project-member] Lock "612b436d-b0ac-4bda-8248-9b354201fde0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.337s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.474344] env[68569]: DEBUG nova.network.neutron [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: 
d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.976225] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.003344] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='423b6ad07591fd7c38198534c6c471b4',container_format='bare',created_at=2025-03-26T05:02:28Z,direct_url=,disk_format='vmdk',id=148c52e9-00a5-490d-b57e-61916d114658,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1438316701-shelved',owner='6f5444e64380448bac041e3c4fd57865',properties=ImageMetaProps,protected=,size=31663104,status='active',tags=,updated_at=2025-03-26T05:02:40Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1329.003605] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1329.003760] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1329.003940] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1329.004128] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1329.004290] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1329.004493] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1329.004866] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1329.005150] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1329.005362] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1329.005620] env[68569]: DEBUG nova.virt.hardware [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1329.006610] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6a846d-c38e-455a-a03c-e28f352a2974 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.015030] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9726390-807d-4797-9792-4b8213c90c04 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.029093] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 
tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:a9:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '224a9b72-1fe5-455e-8f12-0ba6f5dd104a', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.036613] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1329.037083] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.037298] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-100cfc3e-d071-49db-a297-2bbcd0b3b030 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.056564] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.056564] env[68569]: value = "task-3168056" [ 1329.056564] env[68569]: _type = "Task" [ 1329.056564] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.068179] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.479412] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.479412] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.522064] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4859131c-0020-4df7-9b84-291cbc681dd6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.528620] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7cb8ce-565a-4c82-bb21-f288d22936da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.562689] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0fdc27-de26-44e0-ad1c-a39a48adbb94 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.571810] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.573260] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76c3c3c-441c-4a10-b553-1445fe1a139b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.586554] env[68569]: DEBUG nova.compute.provider_tree [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.695382] env[68569]: DEBUG nova.compute.manager [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1329.695382] env[68569]: DEBUG nova.compute.manager [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing instance network info cache due to event network-changed-224a9b72-1fe5-455e-8f12-0ba6f5dd104a. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1329.695382] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] Acquiring lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.695382] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] Acquired lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.695382] env[68569]: DEBUG nova.network.neutron [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Refreshing network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1329.902446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "1055b531-4bca-4245-ab61-698c21b9e484" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.902446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.902446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "1055b531-4bca-4245-ab61-698c21b9e484-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.902446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.902446] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.903526] env[68569]: INFO nova.compute.manager [None req-77a15f07-1800-4d25-8de4-dbe3460101ae 
tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Terminating instance [ 1330.070332] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.092565] env[68569]: DEBUG nova.scheduler.client.report [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1330.407971] env[68569]: DEBUG nova.compute.manager [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1330.408251] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1330.408529] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84ef9e24-c49b-4364-b6c7-c151dc1823d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.416256] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1330.416256] env[68569]: value = "task-3168057" [ 1330.416256] env[68569]: _type = "Task" [ 1330.416256] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.420671] env[68569]: DEBUG nova.network.neutron [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updated VIF entry in instance network info cache for port 224a9b72-1fe5-455e-8f12-0ba6f5dd104a. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.420999] env[68569]: DEBUG nova.network.neutron [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [{"id": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "address": "fa:16:3e:e4:a9:a8", "network": {"id": "d57c5811-fa37-48b6-9d26-a23aaafff9ff", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2080115511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f5444e64380448bac041e3c4fd57865", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224a9b72-1f", "ovs_interfaceid": "224a9b72-1fe5-455e-8f12-0ba6f5dd104a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.424879] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168057, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.570576] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.595923] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.117s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.595923] env[68569]: INFO nova.compute.manager [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Successfully reverted task state from resize_migrating on failure for instance. 
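Several records in this section repeat the provider inventory as {'VCPU': {'total': 48, 'reserved': 0, ..., 'allocation_ratio': 4.0}, 'MEMORY_MB': {...}, 'DISK_GB': {...}}. As a reading aid only, the sketch below (plain Python, not Placement or Nova code) applies the commonly documented capacity rule, capacity = (total - reserved) * allocation_ratio, to the inventory values copied from those records; treat the helper and its printed numbers as an assumption about how the figures combine, not as the service's own computation:

# Standalone sketch: derive schedulable capacity from the inventory dict logged above.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 129,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def capacity(inv):
    # Assumed rule: overcommit ratio is applied after subtracting the reserved amount.
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

if __name__ == "__main__":
    for resource_class, inv in INVENTORY.items():
        # Expected output under this assumption: VCPU 192, MEMORY_MB 196078, DISK_GB 400.
        print(resource_class, capacity(inv))
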
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server [None req-5d2ea4ee-0836-4592-8aef-94682072ca0c tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Exception during message handling: TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server raise self.value
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server raise self.value
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server raise self.value
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6583, in resize_instance
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server raise self.value
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6580, in resize_instance
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration,
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6624, in _resize_instance
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off(
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server TypeError: VMwareVCDriver.migrate_disk_and_power_off() takes from 6 to 9 positional arguments but 10 were given
[ 1330.603591] env[68569]: ERROR oslo_messaging.rpc.server
[ 1330.926649] env[68569]: DEBUG oslo_concurrency.lockutils [req-b0e33921-1c1a-4afa-aba4-bc3a968a1985 req-ad3e02b5-cfc0-424c-8aaf-febd75a1d121 service nova] Releasing lock "refresh_cache-d57645fc-82d5-4ae8-93c3-0de095a66649" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 1330.927119] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168057, 'name': PowerOffVM_Task, 'duration_secs': 0.226813} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.927387] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1330.927581] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Volume detach. Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1330.927772] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633763', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'name': 'volume-2ac71f6c-0140-45ab-b458-462aa72f76ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1055b531-4bca-4245-ab61-698c21b9e484', 'attached_at': '', 'detached_at': '', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'serial': '2ac71f6c-0140-45ab-b458-462aa72f76ff'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1330.929589] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfd5fca-864f-44fe-afd9-bb43b53ad2a8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.949300] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9535973c-d0a5-4921-9fdc-31c9afd82064 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.956041] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68810cc-6423-417c-8330-b71569b53095 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.977377] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a01ca4-777b-4d89-a9ce-a0b1341e860a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.993314] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] The volume has not been displaced from its original location: [datastore1] volume-2ac71f6c-0140-45ab-b458-462aa72f76ff/volume-2ac71f6c-0140-45ab-b458-462aa72f76ff.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1330.998453] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1330.998766] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd83365e-cf5c-4050-ace1-a001c08cf9fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.016107] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1331.016107] env[68569]: value = "task-3168058" [ 1331.016107] env[68569]: _type = "Task" [ 1331.016107] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.023212] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168058, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.069959] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.525373] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168058, 'name': ReconfigVM_Task, 'duration_secs': 0.190084} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.525670] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1331.530476] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e2e36c9-7c39-400c-ae8d-e23edb8882ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.545581] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1331.545581] env[68569]: value = "task-3168059" [ 1331.545581] env[68569]: _type = "Task" [ 1331.545581] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.555442] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.569904] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.056378] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168059, 'name': ReconfigVM_Task, 'duration_secs': 0.185019} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.056691] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633763', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'name': 'volume-2ac71f6c-0140-45ab-b458-462aa72f76ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1055b531-4bca-4245-ab61-698c21b9e484', 'attached_at': '', 'detached_at': '', 'volume_id': '2ac71f6c-0140-45ab-b458-462aa72f76ff', 'serial': '2ac71f6c-0140-45ab-b458-462aa72f76ff'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1332.057025] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1332.057788] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0796389-723e-4f7e-99f5-c2441e6e66c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.064439] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1332.067554] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bbf6394-f407-46b5-ab97-12988de3110b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.074484] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168056, 'name': CreateVM_Task, 'duration_secs': 2.987372} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.074643] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1332.075513] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.075674] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1332.076039] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1332.076393] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6571efb-f6c4-4743-a2c5-5a76e75a2274 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.080820] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1332.080820] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea057f-8338-4526-8832-4b2f7e763327" [ 1332.080820] env[68569]: _type = "Task" [ 1332.080820] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.089553] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ea057f-8338-4526-8832-4b2f7e763327, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.132407] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1332.132407] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1332.132734] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleting the datastore file [datastore2] 1055b531-4bca-4245-ab61-698c21b9e484 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1332.132734] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51d75606-5cbe-4061-87dd-db2c80d4ae46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.139859] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1332.139859] env[68569]: value = "task-3168061" [ 1332.139859] env[68569]: _type = "Task" [ 1332.139859] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.147907] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168061, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.590456] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.590721] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Processing image 148c52e9-00a5-490d-b57e-61916d114658 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1332.590951] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.591116] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquired lock "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1332.591293] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1332.591545] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74f2cc4c-7a8f-4af4-a639-e1b6dab4d52c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.600088] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1332.600280] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1332.600978] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45aa222d-5500-48ee-9ed4-a1bbe6796163 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.605723] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1332.605723] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520eaae7-a4d0-507f-ea10-fc0ccfe244b8" [ 1332.605723] env[68569]: _type = "Task" [ 1332.605723] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.615018] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]520eaae7-a4d0-507f-ea10-fc0ccfe244b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.651160] env[68569]: DEBUG oslo_vmware.api [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190067} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.651160] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1332.651160] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1332.651160] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1332.651160] env[68569]: INFO nova.compute.manager [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1332.651443] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1332.651443] env[68569]: DEBUG nova.compute.manager [-] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1332.651511] env[68569]: DEBUG nova.network.neutron [-] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1333.120033] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1333.120425] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Fetch image to [datastore1] OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757/OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1333.120487] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Downloading stream optimized image 148c52e9-00a5-490d-b57e-61916d114658 to [datastore1] OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757/OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757.vmdk on the data store datastore1 as vApp {{(pid=68569) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1333.120763] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Downloading image file data 148c52e9-00a5-490d-b57e-61916d114658 to the ESX as VM named 'OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757' {{(pid=68569) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1333.192871] env[68569]: DEBUG nova.compute.manager [req-d2e9f9f8-979c-4d8e-b7a3-d7a12102af22 req-74128d4d-0523-47dc-8edc-c6bf315436a3 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Received event network-vif-deleted-6d8ce8ac-d9c6-406f-9778-83b5837c1d87 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1333.192871] env[68569]: INFO nova.compute.manager [req-d2e9f9f8-979c-4d8e-b7a3-d7a12102af22 req-74128d4d-0523-47dc-8edc-c6bf315436a3 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Neutron deleted interface 6d8ce8ac-d9c6-406f-9778-83b5837c1d87; detaching it from the instance and deleting it from the info cache [ 1333.192871] env[68569]: DEBUG nova.network.neutron [req-d2e9f9f8-979c-4d8e-b7a3-d7a12102af22 req-74128d4d-0523-47dc-8edc-c6bf315436a3 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.210028] env[68569]: DEBUG 
oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1333.210028] env[68569]: value = "resgroup-9" [ 1333.210028] env[68569]: _type = "ResourcePool" [ 1333.210028] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1333.210823] env[68569]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e9fb6a74-fb28-42b3-9cec-3e84d9c56c01 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.237789] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease: (returnval){ [ 1333.237789] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af73e4-4f5d-e686-b6b0-f0af779122d4" [ 1333.237789] env[68569]: _type = "HttpNfcLease" [ 1333.237789] env[68569]: } obtained for vApp import into resource pool (val){ [ 1333.237789] env[68569]: value = "resgroup-9" [ 1333.237789] env[68569]: _type = "ResourcePool" [ 1333.237789] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1333.238081] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the lease: (returnval){ [ 1333.238081] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af73e4-4f5d-e686-b6b0-f0af779122d4" [ 1333.238081] env[68569]: _type = "HttpNfcLease" [ 1333.238081] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1333.248085] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1333.248085] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af73e4-4f5d-e686-b6b0-f0af779122d4" [ 1333.248085] env[68569]: _type = "HttpNfcLease" [ 1333.248085] env[68569]: } is initializing. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1333.665103] env[68569]: DEBUG nova.network.neutron [-] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.692719] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6ed5846-f7a9-48c1-95c2-8ebd9e30b00c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.707265] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f47239-4396-43c2-b8a8-2cd930042643 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.735453] env[68569]: DEBUG nova.compute.manager [req-d2e9f9f8-979c-4d8e-b7a3-d7a12102af22 req-74128d4d-0523-47dc-8edc-c6bf315436a3 service nova] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Detach interface failed, port_id=6d8ce8ac-d9c6-406f-9778-83b5837c1d87, reason: Instance 1055b531-4bca-4245-ab61-698c21b9e484 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1333.745646] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1333.745646] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af73e4-4f5d-e686-b6b0-f0af779122d4" [ 1333.745646] env[68569]: _type = "HttpNfcLease" [ 1333.745646] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1333.745939] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1333.745939] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52af73e4-4f5d-e686-b6b0-f0af779122d4" [ 1333.745939] env[68569]: _type = "HttpNfcLease" [ 1333.745939] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1333.746685] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435a2b7a-20a8-424d-8be6-4473db60570a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.754653] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522d576c-be0b-dbe5-13ba-5504a27e5382/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1333.754880] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating HTTP connection to write to file with size = 31663104 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522d576c-be0b-dbe5-13ba-5504a27e5382/disk-0.vmdk. 
{{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1333.819925] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "d593a549-45d3-4ae2-a4ac-96e945a762e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.820175] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.828017] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-164b6aa0-2d43-451a-907b-45a860bea414 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.168252] env[68569]: INFO nova.compute.manager [-] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Took 1.52 seconds to deallocate network for instance. [ 1334.323141] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1334.721574] env[68569]: INFO nova.compute.manager [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Took 0.55 seconds to detach 1 volumes for instance. 
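The lock bookkeeping in the surrounding entries (Acquiring lock "..." by "...", acquired ... :: waited 0.000s, and later "released" ... :: held N.NNNs) comes from the oslo.concurrency lockutils helper whose file path appears in those records; Nova uses it to serialize work such as ResourceTracker.instance_claim on the shared "compute_resources" semaphore, and to serialize build attempts per instance UUID. A minimal sketch of that usage pattern follows, assuming only that oslo.concurrency is installed; the class and method names are illustrative, not Nova's.

import logging
import time

from oslo_concurrency import lockutils

# With DEBUG logging enabled, lockutils emits the same "Acquiring lock ... by
# ..." / "acquired ... :: waited" / "released ... :: held" messages seen above.
logging.basicConfig(level=logging.DEBUG)

class ToyResourceTracker:
    # Illustrative stand-in: every claim funnels through one named in-process
    # semaphore, so concurrent claims queue up and the log records how long
    # each caller waited for and then held the lock.
    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid):
        time.sleep(0.1)  # stands in for updating usage and inventory
        return 'claimed %s' % instance_uuid

if __name__ == '__main__':
    tracker = ToyResourceTracker()
    print(tracker.instance_claim('d593a549-45d3-4ae2-a4ac-96e945a762e3'))

The per-instance lock taken in the entries above ("d593a549-..." by _locked_do_build_and_run_instance) follows the same pattern, with the instance UUID used as the lock name so that concurrent build requests for one instance do not interleave.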
[ 1334.849274] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.849274] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.852305] env[68569]: INFO nova.compute.claims [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1334.881259] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Completed reading data from the image iterator. {{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1334.881827] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522d576c-be0b-dbe5-13ba-5504a27e5382/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1334.882847] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68befa2-8611-468c-b32b-2d87e0f16b6b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.889547] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522d576c-be0b-dbe5-13ba-5504a27e5382/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1334.889874] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522d576c-be0b-dbe5-13ba-5504a27e5382/disk-0.vmdk. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1334.890399] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-408b3c9c-2c44-40a6-b253-5234f579b00f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.067241] env[68569]: DEBUG oslo_vmware.rw_handles [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522d576c-be0b-dbe5-13ba-5504a27e5382/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1335.067523] env[68569]: INFO nova.virt.vmwareapi.images [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Downloaded image file data 148c52e9-00a5-490d-b57e-61916d114658 [ 1335.068456] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb68601-152c-4b84-8301-fbf7abb77781 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.083365] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9049879e-f037-4323-8671-07e7f3061270 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.112810] env[68569]: INFO nova.virt.vmwareapi.images [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] The imported VM was unregistered [ 1335.115131] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1335.115359] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Creating directory with path [datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.115600] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e3ec70c-222f-45a2-9f6e-6c6b8566734e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.124612] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Created directory with path [datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.124790] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 
tempest-AttachVolumeShelveTestJSON-604261640-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757/OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757.vmdk to [datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk. {{(pid=68569) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1335.125014] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-134b947e-6a8b-41cd-9288-c5cd03fe21b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.130182] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1335.130182] env[68569]: value = "task-3168064" [ 1335.130182] env[68569]: _type = "Task" [ 1335.130182] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.137445] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168064, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.230305] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.642760] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168064, 'name': MoveVirtualDisk_Task} progress is 26%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.921359] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7f0e9f-a38a-4c7e-a01c-ff305ffd2e79 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.932499] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c023c756-20b3-487e-b7d7-c881e954ac0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.968413] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561eb86a-5fa2-4bac-bd16-73cb9a929355 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.979580] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc91521-a507-4ba0-a060-f3afa43267ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.999719] env[68569]: DEBUG nova.compute.provider_tree [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.145292] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168064, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.503731] env[68569]: DEBUG nova.scheduler.client.report [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1336.646697] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168064, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.009511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.161s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.010109] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1337.013107] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.783s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.013400] env[68569]: DEBUG nova.objects.instance [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'resources' on Instance uuid 1055b531-4bca-4245-ab61-698c21b9e484 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.145166] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168064, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.516199] env[68569]: DEBUG nova.compute.utils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1337.520530] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1337.520738] env[68569]: DEBUG nova.network.neutron [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1337.555430] env[68569]: DEBUG nova.policy [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7751d47c79474fd7ad2746eda1bde14b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3abf9360a13b482eb9ab88e8857c7e05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1337.568048] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0c015c-30f5-4777-ae03-658caf9d4029 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.575664] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256e713b-01ff-4d8c-bd82-aedd790ca883 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.606883] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c07697-cb33-4d5a-add5-27a875659163 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.614006] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44ecc1a-28bc-4c68-af44-cd5a9853819c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.626716] env[68569]: DEBUG nova.compute.provider_tree [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.642092] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168064, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.108411} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.642303] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757/OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757.vmdk to [datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk. [ 1337.642479] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Cleaning up location [datastore1] OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1337.642657] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9c17e415-db6f-4edd-94f8-a631c3875757 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1337.642879] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a55b3de5-e7c0-489f-95ec-a0b5a95d40fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.649286] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1337.649286] env[68569]: value = "task-3168065" [ 1337.649286] env[68569]: _type = "Task" [ 1337.649286] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.657248] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.780494] env[68569]: DEBUG nova.network.neutron [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Successfully created port: 29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.021553] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1338.130283] env[68569]: DEBUG nova.scheduler.client.report [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1338.158480] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039315} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.158737] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1338.158883] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Releasing lock "[datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.159161] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk to [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.159408] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f720daac-e84a-4c7f-85fb-4ed99ba5d6e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.165892] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1338.165892] env[68569]: value = "task-3168066" [ 1338.165892] env[68569]: _type = "Task" [ 1338.165892] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.173058] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.634557] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.660011] env[68569]: INFO nova.scheduler.client.report [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted allocations for instance 1055b531-4bca-4245-ab61-698c21b9e484 [ 1338.680764] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168066, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.031927] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1339.059244] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1339.059539] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.059692] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1339.059907] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.060014] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1339.060276] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1339.060569] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1339.060754] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 
tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1339.060926] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1339.061102] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1339.061282] env[68569]: DEBUG nova.virt.hardware [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1339.062210] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b259d494-5bc9-4dfc-a951-d2c7986e29c2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.074265] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e4e723-1739-426e-b965-dddc43bd450e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.163869] env[68569]: DEBUG nova.compute.manager [req-8600905f-cff2-47be-b1f4-5f3c8723df7a req-a98a87fc-2e3c-443e-96cd-dd686bf21fb5 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Received event network-vif-plugged-29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1339.163987] env[68569]: DEBUG oslo_concurrency.lockutils [req-8600905f-cff2-47be-b1f4-5f3c8723df7a req-a98a87fc-2e3c-443e-96cd-dd686bf21fb5 service nova] Acquiring lock "d593a549-45d3-4ae2-a4ac-96e945a762e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.164143] env[68569]: DEBUG oslo_concurrency.lockutils [req-8600905f-cff2-47be-b1f4-5f3c8723df7a req-a98a87fc-2e3c-443e-96cd-dd686bf21fb5 service nova] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.164312] env[68569]: DEBUG oslo_concurrency.lockutils [req-8600905f-cff2-47be-b1f4-5f3c8723df7a req-a98a87fc-2e3c-443e-96cd-dd686bf21fb5 service nova] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.164519] env[68569]: DEBUG nova.compute.manager [req-8600905f-cff2-47be-b1f4-5f3c8723df7a req-a98a87fc-2e3c-443e-96cd-dd686bf21fb5 
service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] No waiting events found dispatching network-vif-plugged-29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1339.164630] env[68569]: WARNING nova.compute.manager [req-8600905f-cff2-47be-b1f4-5f3c8723df7a req-a98a87fc-2e3c-443e-96cd-dd686bf21fb5 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Received unexpected event network-vif-plugged-29d83766-922d-496b-89a8-3c0762656469 for instance with vm_state building and task_state spawning. [ 1339.169043] env[68569]: DEBUG oslo_concurrency.lockutils [None req-77a15f07-1800-4d25-8de4-dbe3460101ae tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "1055b531-4bca-4245-ab61-698c21b9e484" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.269s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.183216] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168066, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.250320] env[68569]: DEBUG nova.network.neutron [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Successfully updated port: 29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1339.684557] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168066, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.753508] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "refresh_cache-d593a549-45d3-4ae2-a4ac-96e945a762e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.753738] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquired lock "refresh_cache-d593a549-45d3-4ae2-a4ac-96e945a762e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1339.753819] env[68569]: DEBUG nova.network.neutron [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.183265] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168066, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.286947] env[68569]: DEBUG nova.network.neutron [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1340.439893] env[68569]: DEBUG nova.network.neutron [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Updating instance_info_cache with network_info: [{"id": "29d83766-922d-496b-89a8-3c0762656469", "address": "fa:16:3e:f9:7b:82", "network": {"id": "bffd9ad6-373e-4f95-ab86-44fd0e590bda", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-815562165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3abf9360a13b482eb9ab88e8857c7e05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d83766-92", "ovs_interfaceid": "29d83766-922d-496b-89a8-3c0762656469", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.550859] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.551092] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.678959] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168066, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.473658} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.679355] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/148c52e9-00a5-490d-b57e-61916d114658/148c52e9-00a5-490d-b57e-61916d114658.vmdk to [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1340.680098] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2911fee1-7841-493d-baeb-a70205adb03d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.701952] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.702616] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e034b0-cbea-484b-afeb-0c9e1f5122f1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.722563] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1340.722563] env[68569]: value = "task-3168067" [ 1340.722563] env[68569]: _type = "Task" [ 1340.722563] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.730964] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168067, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.943451] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Releasing lock "refresh_cache-d593a549-45d3-4ae2-a4ac-96e945a762e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1340.943910] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Instance network_info: |[{"id": "29d83766-922d-496b-89a8-3c0762656469", "address": "fa:16:3e:f9:7b:82", "network": {"id": "bffd9ad6-373e-4f95-ab86-44fd0e590bda", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-815562165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3abf9360a13b482eb9ab88e8857c7e05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d83766-92", "ovs_interfaceid": "29d83766-922d-496b-89a8-3c0762656469", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1340.944536] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:7b:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3fca0ab6-cc80-429f-9117-885f170135b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29d83766-922d-496b-89a8-3c0762656469', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1340.953212] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Creating folder: Project (3abf9360a13b482eb9ab88e8857c7e05). Parent ref: group-v633430. 
{{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1340.953515] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59cd00da-6f96-4c6f-be6a-85e7ef0a766a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.963642] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Created folder: Project (3abf9360a13b482eb9ab88e8857c7e05) in parent group-v633430. [ 1340.963873] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Creating folder: Instances. Parent ref: group-v633767. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1340.964102] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ee790f4-d019-473e-8da8-d4fc19c764e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.972437] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Created folder: Instances in parent group-v633767. [ 1340.972759] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1340.972913] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1340.973248] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-674e3480-69c0-48b0-a637-707e36c202fa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.993286] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1340.993286] env[68569]: value = "task-3168070" [ 1340.993286] env[68569]: _type = "Task" [ 1340.993286] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.000686] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168070, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.053615] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1341.191401] env[68569]: DEBUG nova.compute.manager [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Received event network-changed-29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1341.191720] env[68569]: DEBUG nova.compute.manager [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Refreshing instance network info cache due to event network-changed-29d83766-922d-496b-89a8-3c0762656469. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1341.192071] env[68569]: DEBUG oslo_concurrency.lockutils [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] Acquiring lock "refresh_cache-d593a549-45d3-4ae2-a4ac-96e945a762e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.192325] env[68569]: DEBUG oslo_concurrency.lockutils [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] Acquired lock "refresh_cache-d593a549-45d3-4ae2-a4ac-96e945a762e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1341.192633] env[68569]: DEBUG nova.network.neutron [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Refreshing network info cache for port 29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1341.233523] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168067, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.502980] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168070, 'name': CreateVM_Task, 'duration_secs': 0.43775} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.503189] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1341.503775] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.503946] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1341.504296] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1341.504885] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d574ca8-d9b0-48eb-ada9-4600f51ba707 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.509408] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1341.509408] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fc2e24-15b3-7453-ecf9-baf4f925b600" [ 1341.509408] env[68569]: _type = "Task" [ 1341.509408] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.517346] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fc2e24-15b3-7453-ecf9-baf4f925b600, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.581656] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.581928] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.583484] env[68569]: INFO nova.compute.claims [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.732892] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168067, 'name': ReconfigVM_Task, 'duration_secs': 0.718495} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.733239] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Reconfigured VM instance instance-00000077 to attach disk [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649/d57645fc-82d5-4ae8-93c3-0de095a66649.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.733829] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1fc32388-735b-4712-a8e6-694b16411895 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.740084] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1341.740084] env[68569]: value = "task-3168071" [ 1341.740084] env[68569]: _type = "Task" [ 1341.740084] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.747286] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168071, 'name': Rename_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.876310] env[68569]: DEBUG nova.network.neutron [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Updated VIF entry in instance network info cache for port 29d83766-922d-496b-89a8-3c0762656469. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1341.876729] env[68569]: DEBUG nova.network.neutron [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Updating instance_info_cache with network_info: [{"id": "29d83766-922d-496b-89a8-3c0762656469", "address": "fa:16:3e:f9:7b:82", "network": {"id": "bffd9ad6-373e-4f95-ab86-44fd0e590bda", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-815562165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3abf9360a13b482eb9ab88e8857c7e05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3fca0ab6-cc80-429f-9117-885f170135b7", "external-id": "nsx-vlan-transportzone-393", "segmentation_id": 393, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29d83766-92", "ovs_interfaceid": "29d83766-922d-496b-89a8-3c0762656469", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.020979] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fc2e24-15b3-7453-ecf9-baf4f925b600, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.020979] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.021244] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.021325] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.021469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.021646] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1342.021903] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f0d2c34-6206-4f48-857b-b407104e1140 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.029683] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1342.029844] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1342.030551] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dc03cac-3ab7-4188-839d-4358264802fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.035346] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1342.035346] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52afb13f-499f-b93d-1297-50165eb8f7e6" [ 1342.035346] env[68569]: _type = "Task" [ 1342.035346] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.042462] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52afb13f-499f-b93d-1297-50165eb8f7e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.249948] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168071, 'name': Rename_Task, 'duration_secs': 0.133479} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.250262] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.250505] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ead23b9b-7419-4f3d-9a34-c70c51e8b1c1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.256846] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1342.256846] env[68569]: value = "task-3168072" [ 1342.256846] env[68569]: _type = "Task" [ 1342.256846] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.263813] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168072, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.380305] env[68569]: DEBUG oslo_concurrency.lockutils [req-37e4dc99-e595-4455-adf0-29349e982f78 req-cd21ef36-4bb7-4ff6-8a24-435fcdc36312 service nova] Releasing lock "refresh_cache-d593a549-45d3-4ae2-a4ac-96e945a762e3" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.546077] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52afb13f-499f-b93d-1297-50165eb8f7e6, 'name': SearchDatastore_Task, 'duration_secs': 0.00948} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.546514] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281660db-455c-45b2-a2ea-f59c8fc4b836 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.551685] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1342.551685] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52267276-8ba7-36b8-b71b-02e2379be963" [ 1342.551685] env[68569]: _type = "Task" [ 1342.551685] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.559151] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52267276-8ba7-36b8-b71b-02e2379be963, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.640037] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1e21dc-a1be-4040-a399-9def451c9cbe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.647459] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34af0049-edb6-472f-a5a1-5f0efe10c47b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.675956] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc801fc4-5e9b-499a-99c1-958b9d1e1902 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.682517] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c817c73-3b1e-4aa1-9fe8-c987821985ed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.694901] env[68569]: DEBUG nova.compute.provider_tree [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.766295] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168072, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.061433] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52267276-8ba7-36b8-b71b-02e2379be963, 'name': SearchDatastore_Task, 'duration_secs': 0.011565} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.061690] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.061943] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] d593a549-45d3-4ae2-a4ac-96e945a762e3/d593a549-45d3-4ae2-a4ac-96e945a762e3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1343.062217] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67958f88-3177-4bf1-a98e-c1b101b446fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.068322] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1343.068322] env[68569]: value = "task-3168073" [ 1343.068322] env[68569]: _type = "Task" [ 1343.068322] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.075861] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.198257] env[68569]: DEBUG nova.scheduler.client.report [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1343.266960] env[68569]: DEBUG oslo_vmware.api [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168072, 'name': PowerOnVM_Task, 'duration_secs': 0.663883} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.267247] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1343.376689] env[68569]: DEBUG nova.compute.manager [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1343.377928] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5607cc-c99c-4a4f-93b5-11e6887c93f9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.580425] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168073, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.705034] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.122s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.705034] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1343.898353] env[68569]: DEBUG oslo_concurrency.lockutils [None req-c529332d-7a68-4342-93ad-8b675c0b3060 tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.927s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.079612] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712862} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.079612] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] d593a549-45d3-4ae2-a4ac-96e945a762e3/d593a549-45d3-4ae2-a4ac-96e945a762e3.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1344.079821] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1344.080081] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a228f50f-d3e4-4a47-886e-a9309933856f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.086205] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1344.086205] env[68569]: value = "task-3168074" [ 1344.086205] env[68569]: _type = "Task" [ 1344.086205] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.093986] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168074, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.211218] env[68569]: DEBUG nova.compute.utils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1344.212681] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Allocating IP information in the background. 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1344.212858] env[68569]: DEBUG nova.network.neutron [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1344.258951] env[68569]: DEBUG nova.policy [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b8671c22fa042d28350b219ac52d775', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '335f83fe0b8e42aa80e8f0691b609649', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1344.500934] env[68569]: DEBUG nova.network.neutron [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Successfully created port: b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1344.597367] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168074, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063913} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.597367] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1344.598405] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f665ac1-d5b9-4c30-aa4d-6ce6f0be3d02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.620431] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] d593a549-45d3-4ae2-a4ac-96e945a762e3/d593a549-45d3-4ae2-a4ac-96e945a762e3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1344.620720] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-176f8ee7-db40-4c88-90fa-bf1fb7ff9681 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.641830] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1344.641830] env[68569]: value = "task-3168075" [ 1344.641830] env[68569]: _type = "Task" [ 1344.641830] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.649204] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168075, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.716257] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1345.153136] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.653282] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168075, 'name': ReconfigVM_Task, 'duration_secs': 0.724732} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.653565] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Reconfigured VM instance instance-0000007b to attach disk [datastore2] d593a549-45d3-4ae2-a4ac-96e945a762e3/d593a549-45d3-4ae2-a4ac-96e945a762e3.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1345.654194] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ff2d47c-6f47-471c-9ccb-b5f78f4b33ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.660946] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1345.660946] env[68569]: value = "task-3168076" [ 1345.660946] env[68569]: _type = "Task" [ 1345.660946] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.668232] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168076, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.727743] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1345.753356] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1345.753605] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1345.753762] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1345.753939] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1345.754094] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1345.754244] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1345.754453] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1345.754610] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1345.754837] env[68569]: DEBUG 
nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1345.755034] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1345.755215] env[68569]: DEBUG nova.virt.hardware [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1345.756109] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693f2e41-b33c-4d25-9f6b-760c62cb9e50 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.763606] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7316b4e5-9dc4-4a8f-bc41-c47d4a291f02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.848152] env[68569]: DEBUG nova.compute.manager [req-64523561-2dfe-4d38-910f-a1c7c505632c req-77e56b90-fb84-45fc-85c2-ad08d2b92202 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-vif-plugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1345.848383] env[68569]: DEBUG oslo_concurrency.lockutils [req-64523561-2dfe-4d38-910f-a1c7c505632c req-77e56b90-fb84-45fc-85c2-ad08d2b92202 service nova] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.848586] env[68569]: DEBUG oslo_concurrency.lockutils [req-64523561-2dfe-4d38-910f-a1c7c505632c req-77e56b90-fb84-45fc-85c2-ad08d2b92202 service nova] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.848751] env[68569]: DEBUG oslo_concurrency.lockutils [req-64523561-2dfe-4d38-910f-a1c7c505632c req-77e56b90-fb84-45fc-85c2-ad08d2b92202 service nova] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.848919] env[68569]: DEBUG nova.compute.manager [req-64523561-2dfe-4d38-910f-a1c7c505632c req-77e56b90-fb84-45fc-85c2-ad08d2b92202 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] No waiting events found dispatching network-vif-plugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1345.849091] env[68569]: WARNING nova.compute.manager 
[req-64523561-2dfe-4d38-910f-a1c7c505632c req-77e56b90-fb84-45fc-85c2-ad08d2b92202 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received unexpected event network-vif-plugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 for instance with vm_state building and task_state spawning. [ 1345.925606] env[68569]: DEBUG nova.network.neutron [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Successfully updated port: b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1346.170577] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168076, 'name': Rename_Task, 'duration_secs': 0.496298} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.170940] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1346.171017] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efa1bf15-e3cc-4e66-9862-1ae7c181ee60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.176611] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1346.176611] env[68569]: value = "task-3168077" [ 1346.176611] env[68569]: _type = "Task" [ 1346.176611] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.183910] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168077, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.428360] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.428360] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1346.428360] env[68569]: DEBUG nova.network.neutron [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1346.686410] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168077, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.958747] env[68569]: DEBUG nova.network.neutron [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1347.078912] env[68569]: DEBUG nova.network.neutron [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", "ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.187749] env[68569]: DEBUG oslo_vmware.api [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168077, 'name': PowerOnVM_Task, 'duration_secs': 0.704592} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.188163] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1347.188273] env[68569]: INFO nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Took 8.16 seconds to spawn the instance on the hypervisor. 
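[editor's note] The task lifecycle traced above and below (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) repeats the same shape each time: the driver invokes a vCenter task via oslo_vmware.service, then oslo_vmware.api polls it, emitting the "progress is N%" lines and finally "completed successfully" with a duration_secs value. The sketch below is a minimal, self-contained illustration of that poll-until-done loop only; it is not the oslo.vmware implementation, and FakeTask, its refresh() method, and wait_for_task here are invented stand-ins for the real vCenter task handles seen in the log (e.g. task-3168077).

# Minimal sketch of the poll-until-done pattern visible in the log:
# invoke a long-running task, poll its progress, report duration on success.
# FakeTask is a stand-in for a real vCenter task handle; the real driver
# does this through oslo_vmware.api (wait_for_task / _poll_task).
import time


class FakeTask:
    """Simulates a vCenter task that advances a little on every poll."""

    def __init__(self, name):
        self.name = name
        self.progress = 0
        self.state = "running"

    def refresh(self):
        # A real poll would fetch TaskInfo from vCenter; here we just
        # advance the simulated progress until the task reports success.
        self.progress = min(100, self.progress + 33)
        if self.progress >= 100:
            self.state = "success"


def wait_for_task(task, poll_interval=0.5):
    """Poll `task` until it succeeds, mirroring the log's progress lines."""
    started = time.monotonic()
    while True:
        task.refresh()
        print(f"Task: {task.name} progress is {task.progress}%.")
        if task.state == "success":
            duration = time.monotonic() - started
            print(f"Task: {task.name} completed successfully "
                  f"(duration_secs={duration:.6f}).")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))

In the real driver the polling interval, timeout, and error handling live inside oslo_vmware; the point of the sketch is only the loop shape that produces the "progress is N%" and "completed successfully" entries interleaved through this log. [end note]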
[ 1347.188371] env[68569]: DEBUG nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1347.189104] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9529cad-acba-424c-a460-0985052f2c74 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.581966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.582324] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance network_info: |[{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", "ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1347.582754] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:2d:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b83be58c-0aca-40bd-b217-035d5fadc9a1', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1347.590159] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1347.590393] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1347.590625] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85187af0-edf0-4c21-af12-6ec84bc6bbda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.610401] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1347.610401] env[68569]: value = "task-3168078" [ 1347.610401] env[68569]: _type = "Task" [ 1347.610401] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.617695] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168078, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.706888] env[68569]: INFO nova.compute.manager [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Took 12.88 seconds to build instance. [ 1347.874570] env[68569]: DEBUG nova.compute.manager [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1347.874936] env[68569]: DEBUG nova.compute.manager [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing instance network info cache due to event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1347.875711] env[68569]: DEBUG oslo_concurrency.lockutils [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.875954] env[68569]: DEBUG oslo_concurrency.lockutils [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.876257] env[68569]: DEBUG nova.network.neutron [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1348.119993] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168078, 'name': CreateVM_Task, 'duration_secs': 0.303212} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.120195] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1348.120821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.120986] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.121323] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1348.121590] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7b7a855-c21d-4df2-bdc6-04c0e8203a08 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.125844] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1348.125844] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d93329-c9f0-0898-b2f8-88abd351f3a8" [ 1348.125844] env[68569]: _type = "Task" [ 1348.125844] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.132901] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d93329-c9f0-0898-b2f8-88abd351f3a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.209362] env[68569]: DEBUG oslo_concurrency.lockutils [None req-542bbe05-07e2-45ff-8878-ca8725177675 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.389s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.283260] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "d593a549-45d3-4ae2-a4ac-96e945a762e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.283481] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.283698] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "d593a549-45d3-4ae2-a4ac-96e945a762e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.283885] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.284068] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.286213] env[68569]: INFO nova.compute.manager [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Terminating instance [ 1348.550116] env[68569]: DEBUG nova.network.neutron [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updated VIF entry 
in instance network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.550779] env[68569]: DEBUG nova.network.neutron [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", "ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.636232] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52d93329-c9f0-0898-b2f8-88abd351f3a8, 'name': SearchDatastore_Task, 'duration_secs': 0.010498} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.636508] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1348.636727] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1348.636956] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.637128] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.637330] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1348.637577] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80484ca3-8f07-4895-80bd-fb681a2e842b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.645447] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1348.645616] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1348.646327] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe2244ab-cf55-4975-a4b7-19fc8fa8bfda {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.651585] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1348.651585] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529dc5a8-fd78-4cee-bed9-67d1bd4c1aec" [ 1348.651585] env[68569]: _type = "Task" [ 1348.651585] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.658476] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529dc5a8-fd78-4cee-bed9-67d1bd4c1aec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.789471] env[68569]: DEBUG nova.compute.manager [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1348.789684] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1348.790590] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c47c52-9de1-431b-8eb2-7a1801bc52e5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.799413] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1348.799585] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efd1936e-0d6a-4331-8a3e-e071a42cbfe3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.804894] env[68569]: DEBUG oslo_vmware.api [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1348.804894] env[68569]: value = "task-3168079" [ 1348.804894] env[68569]: _type = "Task" [ 1348.804894] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.813306] env[68569]: DEBUG oslo_vmware.api [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.053295] env[68569]: DEBUG oslo_concurrency.lockutils [req-be0cb547-aaf4-4e19-8c18-93d26e78b6f5 req-2e23f759-1a5a-4847-9449-eec881131662 service nova] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.166286] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529dc5a8-fd78-4cee-bed9-67d1bd4c1aec, 'name': SearchDatastore_Task, 'duration_secs': 0.008132} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.167438] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b63997bc-faa6-477c-93ae-2845b39799b9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.173034] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1349.173034] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e917da-bac4-5880-84a6-9bed3a2acbcb" [ 1349.173034] env[68569]: _type = "Task" [ 1349.173034] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.180508] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e917da-bac4-5880-84a6-9bed3a2acbcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.314676] env[68569]: DEBUG oslo_vmware.api [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168079, 'name': PowerOffVM_Task, 'duration_secs': 0.190292} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.315050] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1349.315128] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1349.315339] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c3a3f40-800f-4ec7-960d-d406ca0ed2c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.377036] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1349.377254] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1349.377435] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Deleting the datastore file [datastore2] d593a549-45d3-4ae2-a4ac-96e945a762e3 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.377699] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57043299-dd51-4f2c-ba53-997f00bcdf30 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.383777] env[68569]: DEBUG oslo_vmware.api [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for the task: (returnval){ [ 1349.383777] env[68569]: value = "task-3168081" [ 1349.383777] env[68569]: _type = "Task" [ 1349.383777] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.391729] env[68569]: DEBUG oslo_vmware.api [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168081, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.683182] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52e917da-bac4-5880-84a6-9bed3a2acbcb, 'name': SearchDatastore_Task, 'duration_secs': 0.009003} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.683435] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.683691] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1349.683945] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-602e1c43-83bd-44d2-8cf5-7453443516f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.690361] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1349.690361] env[68569]: value = "task-3168082" [ 1349.690361] env[68569]: _type = "Task" [ 1349.690361] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.697319] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.893822] env[68569]: DEBUG oslo_vmware.api [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Task: {'id': task-3168081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142628} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.894040] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.894210] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1349.894389] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1349.894564] env[68569]: INFO nova.compute.manager [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1349.894817] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1349.895018] env[68569]: DEBUG nova.compute.manager [-] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1349.895117] env[68569]: DEBUG nova.network.neutron [-] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1350.203067] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430848} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.203067] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1350.203067] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1350.203067] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35599391-d904-4766-b22e-e84d1621dd29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.209025] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1350.209025] env[68569]: value = "task-3168083" [ 1350.209025] env[68569]: _type = "Task" [ 1350.209025] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.215250] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168083, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.430508] env[68569]: DEBUG nova.compute.manager [req-3be603cc-8835-4c2e-ad43-aadf9d564eac req-ec8306c7-4d48-4e7c-a762-f51d02334839 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Received event network-vif-deleted-29d83766-922d-496b-89a8-3c0762656469 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1350.430777] env[68569]: INFO nova.compute.manager [req-3be603cc-8835-4c2e-ad43-aadf9d564eac req-ec8306c7-4d48-4e7c-a762-f51d02334839 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Neutron deleted interface 29d83766-922d-496b-89a8-3c0762656469; detaching it from the instance and deleting it from the info cache [ 1350.431033] env[68569]: DEBUG nova.network.neutron [req-3be603cc-8835-4c2e-ad43-aadf9d564eac req-ec8306c7-4d48-4e7c-a762-f51d02334839 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.716178] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168083, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05929} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.716472] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1350.717236] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd13268a-ba88-4488-a9a4-83fc9028f495 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.739582] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1350.739792] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bdba14f-9164-43b5-b6c0-b835a46ebf65 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.758310] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1350.758310] env[68569]: value = "task-3168084" [ 1350.758310] env[68569]: _type = "Task" [ 1350.758310] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.765649] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.906419] env[68569]: DEBUG nova.network.neutron [-] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.933943] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1feec34-e4e7-4e29-98a6-fa73310b21ad {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.944012] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c351e16d-37e3-4ae7-b86a-b0f4f98e753d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.971727] env[68569]: DEBUG nova.compute.manager [req-3be603cc-8835-4c2e-ad43-aadf9d564eac req-ec8306c7-4d48-4e7c-a762-f51d02334839 service nova] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Detach interface failed, port_id=29d83766-922d-496b-89a8-3c0762656469, reason: Instance d593a549-45d3-4ae2-a4ac-96e945a762e3 could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1351.268338] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168084, 'name': ReconfigVM_Task, 'duration_secs': 0.312903} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.268520] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1351.269061] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9c0f763-155e-4665-946e-b2377e715491 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.274921] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1351.274921] env[68569]: value = "task-3168085" [ 1351.274921] env[68569]: _type = "Task" [ 1351.274921] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.283098] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168085, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.409112] env[68569]: INFO nova.compute.manager [-] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Took 1.51 seconds to deallocate network for instance. [ 1351.784756] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168085, 'name': Rename_Task, 'duration_secs': 0.142048} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.785147] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1351.785267] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7c87cb7-dd20-4891-960d-554fbac35b05 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.791433] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1351.791433] env[68569]: value = "task-3168086" [ 1351.791433] env[68569]: _type = "Task" [ 1351.791433] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.798919] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168086, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.916356] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1351.916670] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1351.916892] env[68569]: DEBUG nova.objects.instance [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lazy-loading 'resources' on Instance uuid d593a549-45d3-4ae2-a4ac-96e945a762e3 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1352.300988] env[68569]: DEBUG oslo_vmware.api [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168086, 'name': PowerOnVM_Task, 'duration_secs': 0.467098} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.301283] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1352.301489] env[68569]: INFO nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Took 6.57 seconds to spawn the instance on the hypervisor. [ 1352.301692] env[68569]: DEBUG nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1352.302471] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cec1ae3-97a2-4486-875f-7365cf344106 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.469995] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5b59f4-96eb-4ee1-9f60-d0e6f768bb09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.477224] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9484aa45-5fc9-4b04-b232-f40d518286df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.506734] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce99929-c064-4caf-a191-eb7f5ae9672c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.513331] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bfd8a0-33fc-49de-8b56-fe638a2a7b2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.525863] env[68569]: DEBUG nova.compute.provider_tree [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.818615] env[68569]: INFO nova.compute.manager [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Took 11.26 seconds to build instance. 
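The spawn traced above is a chain of vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each driven by the same wait_for_task poll loop that produces the "progress is N%." and "completed successfully." lines. The following is a minimal sketch of that polling pattern only; the Task/TaskInfo types are stand-ins, not the real oslo.vmware or pyVmomi bindings.

# Illustrative sketch of the poll loop behind the "progress is N%" /
# "completed successfully" log lines above. TaskInfo is a toy stand-in for
# reading the task's "info" property via the PropertyCollector.
import time


class TaskInfo:
    def __init__(self, state, progress=0, error=None):
        self.state = state          # "running" | "success" | "error"
        self.progress = progress    # integer percentage
        self.error = error


def wait_for_task(poll_info, interval=0.5, log=print):
    """Poll a task until it reaches a terminal state.

    poll_info: callable returning the current TaskInfo snapshot.
    """
    while True:
        info = poll_info()
        if info.state == "running":
            log("Task progress is %d%%." % info.progress)
        elif info.state == "success":
            log("Task completed successfully.")
            return info
        else:
            raise RuntimeError("Task failed: %s" % info.error)
        time.sleep(interval)


# Toy usage: a "task" that finishes on the third poll.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 0),
                   TaskInfo("running", 94),
                   TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), interval=0)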
[ 1353.028785] env[68569]: DEBUG nova.scheduler.client.report [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1353.321012] env[68569]: DEBUG oslo_concurrency.lockutils [None req-b6f90da2-53e7-4018-819e-ba14a48f8d50 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.770s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.461222] env[68569]: DEBUG nova.compute.manager [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1353.461222] env[68569]: DEBUG nova.compute.manager [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing instance network info cache due to event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1353.461222] env[68569]: DEBUG oslo_concurrency.lockutils [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.461357] env[68569]: DEBUG oslo_concurrency.lockutils [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1353.461462] env[68569]: DEBUG nova.network.neutron [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.533714] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.616s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.553618] env[68569]: INFO nova.scheduler.client.report [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Deleted allocations for instance d593a549-45d3-4ae2-a4ac-96e945a762e3 [ 1354.064854] env[68569]: DEBUG oslo_concurrency.lockutils [None req-084bd711-ca38-4d35-9ee2-c0563a80c687 tempest-ServersNegativeTestMultiTenantJSON-2012892319 tempest-ServersNegativeTestMultiTenantJSON-2012892319-project-member] Lock "d593a549-45d3-4ae2-a4ac-96e945a762e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.781s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.168027] env[68569]: DEBUG nova.network.neutron [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updated VIF entry in instance network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1354.168401] env[68569]: DEBUG nova.network.neutron [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", "ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.670801] env[68569]: DEBUG oslo_concurrency.lockutils [req-5ace6695-4754-4c22-9224-784f48000cd8 req-8b997dfa-7dba-4c4a-9acb-3e513ab0bd4e service nova] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.593231] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1372.596659] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1372.597143] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1372.597143] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1372.597291] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.597649] env[68569]: DEBUG 
oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.597666] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.101263] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.101514] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.101685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1377.101840] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1377.102767] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58683b08-6d6b-4c6b-84d6-dc696090d6b1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.111136] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8251a2-fc7d-4c8f-b4e6-34317605c3d6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.124663] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e48802f-a26f-4abf-8286-fa4ac8cf9b00 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.130500] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa29a29c-6c2c-4d4d-ae0f-501a0c90563d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.159991] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180852MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1377.160147] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.160346] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.186219] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance d57645fc-82d5-4ae8-93c3-0de095a66649 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.186535] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance 34a807d6-1fdd-47d8-a72e-bf2345de321b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.186611] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1378.186689] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1378.221628] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4b3907-74f4-4c20-9ff7-205b8c422a46 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.228834] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8457fc-d7f7-4a5b-87d1-8b158001badd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.258013] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fca4ae-ab4e-4da2-a633-df2ca858cbb9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.265358] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bb170d-e17e-4a7b-a66f-2434fe53943c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.278209] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.780836] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for 
provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1379.135023] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.135300] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.135507] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.135686] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.135850] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1379.139402] env[68569]: INFO nova.compute.manager [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Terminating instance [ 1379.285040] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1379.285469] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 
None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.125s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1379.643541] env[68569]: DEBUG nova.compute.manager [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1379.643791] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1379.644688] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a20e84-d32f-4185-b4c2-235044a7c7a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.652723] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1379.652975] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-000369fd-4d11-4019-b835-22b0002ab05c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.659566] env[68569]: DEBUG oslo_vmware.api [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1379.659566] env[68569]: value = "task-3168087" [ 1379.659566] env[68569]: _type = "Task" [ 1379.659566] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.667431] env[68569]: DEBUG oslo_vmware.api [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.168175] env[68569]: DEBUG oslo_vmware.api [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168087, 'name': PowerOffVM_Task, 'duration_secs': 0.155631} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.168436] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.168600] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1380.168841] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-156d0e36-ec40-48da-b136-e72435d903e1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.227539] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1380.227770] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1380.227955] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleting the datastore file [datastore1] d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.228225] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4af81e8-eed8-4251-9838-6c9f67da5fb1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.233933] env[68569]: DEBUG oslo_vmware.api [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for the task: (returnval){ [ 1380.233933] env[68569]: value = "task-3168089" [ 1380.233933] env[68569]: _type = "Task" [ 1380.233933] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.240996] env[68569]: DEBUG oslo_vmware.api [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.284518] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.284708] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1380.748156] env[68569]: DEBUG oslo_vmware.api [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Task: {'id': task-3168089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138235} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.748543] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.748790] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1380.749097] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1380.749389] env[68569]: INFO nova.compute.manager [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1380.749800] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1380.750079] env[68569]: DEBUG nova.compute.manager [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1380.750232] env[68569]: DEBUG nova.network.neutron [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1381.169371] env[68569]: DEBUG nova.compute.manager [req-8916f5c1-273d-4887-8593-80905d87fd2d req-7d1fc392-f2a7-4c15-b091-e6e2147ac7cf service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Received event network-vif-deleted-224a9b72-1fe5-455e-8f12-0ba6f5dd104a {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1381.169716] env[68569]: INFO nova.compute.manager [req-8916f5c1-273d-4887-8593-80905d87fd2d req-7d1fc392-f2a7-4c15-b091-e6e2147ac7cf service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Neutron deleted interface 224a9b72-1fe5-455e-8f12-0ba6f5dd104a; detaching it from the instance and deleting it from the info cache [ 1381.169804] env[68569]: DEBUG nova.network.neutron [req-8916f5c1-273d-4887-8593-80905d87fd2d req-7d1fc392-f2a7-4c15-b091-e6e2147ac7cf service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.649096] env[68569]: DEBUG nova.network.neutron [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.673069] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96ef884a-6634-4b3d-b74e-f1f4ad760a25 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.683202] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4305f28a-31e9-4948-b6b7-ab358c545799 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.709681] env[68569]: DEBUG nova.compute.manager [req-8916f5c1-273d-4887-8593-80905d87fd2d req-7d1fc392-f2a7-4c15-b091-e6e2147ac7cf service nova] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Detach interface failed, port_id=224a9b72-1fe5-455e-8f12-0ba6f5dd104a, reason: Instance d57645fc-82d5-4ae8-93c3-0de095a66649 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1382.152278] env[68569]: INFO nova.compute.manager [-] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Took 1.40 seconds to deallocate network for instance. 
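Both teardowns in this stretch (d593a549-45d3-4ae2-a4ac-96e945a762e3 earlier, d57645fc-82d5-4ae8-93c3-0de095a66649 here) follow the same ordering: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports before resource-tracker usage and placement allocations are dropped. A compressed sketch of that ordering follows; every helper name is a placeholder stub, not Nova's actual method signatures.

# Ordering sketch for the destroy path seen above. The real work is done by
# nova.virt.vmwareapi.vmops, ds_util and nova.network.neutron; these stubs
# only show the sequence.
def power_off(vm):            print("PowerOffVM_Task for %s" % vm)
def unregister(vm):           print("UnregisterVM for %s" % vm)
def delete_datastore_dir(vm): print("DeleteDatastoreFile_Task for %s" % vm)
def deallocate_network(vm):   print("deallocate_for_instance() for %s" % vm)
def update_usage(vm):         print("update_usage / delete allocations for %s" % vm)


def terminate_instance(vm_uuid):
    # 1. Stop the guest so its disks can be released.
    power_off(vm_uuid)
    # 2. Remove the VM from vCenter inventory, then wipe its files.
    unregister(vm_uuid)
    delete_datastore_dir(vm_uuid)
    # 3. Only after the hypervisor side is gone are the Neutron ports freed
    #    and the resource-tracker / placement usage dropped.
    deallocate_network(vm_uuid)
    update_usage(vm_uuid)


if __name__ == "__main__":
    terminate_instance("d57645fc-82d5-4ae8-93c3-0de095a66649")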
[ 1382.658786] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.659152] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.659381] env[68569]: DEBUG nova.objects.instance [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lazy-loading 'resources' on Instance uuid d57645fc-82d5-4ae8-93c3-0de095a66649 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1383.202910] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8719785-c834-4141-b7b5-7a2333f72afa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.210340] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0193c670-7ea9-4bd3-9fb8-79d6a605bfc8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.239889] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21740ae-b83c-4f15-bfed-5d0120e22afd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.247115] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7af97e-b93b-4117-af69-bb69a70cdb44 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.260070] env[68569]: DEBUG nova.compute.provider_tree [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.592631] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.763662] env[68569]: DEBUG nova.scheduler.client.report [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1384.268463] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.291906] env[68569]: INFO nova.scheduler.client.report [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Deleted allocations for instance d57645fc-82d5-4ae8-93c3-0de095a66649 [ 1384.800114] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5661f025-c30a-4eb3-8d40-42ea071d88af tempest-AttachVolumeShelveTestJSON-604261640 tempest-AttachVolumeShelveTestJSON-604261640-project-member] Lock "d57645fc-82d5-4ae8-93c3-0de095a66649" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.665s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.236931] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.237240] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.237416] env[68569]: INFO nova.compute.manager [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Shelving [ 1392.246656] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.247028] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-870b92a3-3927-4e40-9d2b-53103bd01c4b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.254566] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1392.254566] env[68569]: value = "task-3168091" [ 1392.254566] env[68569]: _type = "Task" [ 1392.254566] env[68569]: } to 
complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.262287] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.764465] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168091, 'name': PowerOffVM_Task, 'duration_secs': 0.179804} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.764742] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1392.765548] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c518d6-d49e-48a3-ae17-f68a569e2d34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.783821] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743239f1-c33a-46fb-ae91-902de2e74186 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.294322] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Creating Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1393.294711] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4d2b8373-aca0-4e61-a475-67b0709c3944 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.302306] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1393.302306] env[68569]: value = "task-3168092" [ 1393.302306] env[68569]: _type = "Task" [ 1393.302306] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.310280] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168092, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.813880] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168092, 'name': CreateSnapshot_Task, 'duration_secs': 0.413308} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.814198] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Created Snapshot of the VM instance {{(pid=68569) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1393.814994] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd783485-1854-4738-8aca-a72fd53c2a60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.333365] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Creating linked-clone VM from snapshot {{(pid=68569) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1394.333726] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3141119b-2518-4692-b5d4-f874d8bdf46f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.341904] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1394.341904] env[68569]: value = "task-3168093" [ 1394.341904] env[68569]: _type = "Task" [ 1394.341904] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.349850] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168093, 'name': CloneVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.852429] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168093, 'name': CloneVM_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.352780] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168093, 'name': CloneVM_Task, 'duration_secs': 0.930117} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.353244] env[68569]: INFO nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Created linked-clone VM from snapshot [ 1395.353866] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf8f11e-7c7d-4b6a-9df8-6b05719945fe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.360984] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Uploading image 09960703-30d2-4a2c-af9e-39377b6f7b30 {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1395.380680] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1395.380680] env[68569]: value = "vm-633772" [ 1395.380680] env[68569]: _type = "VirtualMachine" [ 1395.380680] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1395.380935] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-17655a7a-c975-400f-9d0a-f09fe464e018 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.387097] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease: (returnval){ [ 1395.387097] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b64680-86f3-ac40-3d77-6ac543952c52" [ 1395.387097] env[68569]: _type = "HttpNfcLease" [ 1395.387097] env[68569]: } obtained for exporting VM: (result){ [ 1395.387097] env[68569]: value = "vm-633772" [ 1395.387097] env[68569]: _type = "VirtualMachine" [ 1395.387097] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1395.387357] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the lease: (returnval){ [ 1395.387357] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b64680-86f3-ac40-3d77-6ac543952c52" [ 1395.387357] env[68569]: _type = "HttpNfcLease" [ 1395.387357] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1395.392893] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1395.392893] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b64680-86f3-ac40-3d77-6ac543952c52" [ 1395.392893] env[68569]: _type = "HttpNfcLease" [ 1395.392893] env[68569]: } is initializing. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1395.896208] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1395.896208] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b64680-86f3-ac40-3d77-6ac543952c52" [ 1395.896208] env[68569]: _type = "HttpNfcLease" [ 1395.896208] env[68569]: } is ready. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1395.898447] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1395.898447] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52b64680-86f3-ac40-3d77-6ac543952c52" [ 1395.898447] env[68569]: _type = "HttpNfcLease" [ 1395.898447] env[68569]: }. {{(pid=68569) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1395.898447] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477b7313-b68b-4ca3-98c0-7376d5032bb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.904554] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e75736-0981-0885-a804-fa4a03e49b44/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1395.904729] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e75736-0981-0885-a804-fa4a03e49b44/disk-0.vmdk for reading. {{(pid=68569) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1395.995151] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63ff0898-cdce-4c33-98b5-712674ddad09 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.236933] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e75736-0981-0885-a804-fa4a03e49b44/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1404.237879] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197d1b07-49a6-44bb-81d0-c8d425bbd1c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.244130] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e75736-0981-0885-a804-fa4a03e49b44/disk-0.vmdk is in state: ready. 
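
Editor's note: the entries above obtain an HttpNfcLease for the exported VM, wait for it to become ready, extract the disk-0.vmdk URL from the lease info, open that URL for reading, and periodically report HttpNfcLeaseProgress while the image data is streamed to Glance. A rough sketch of the read side only, using the requests library and a hypothetical report_progress(percent) callback in place of the actual lease-progress call:

    import requests  # assumed available; the driver uses its own HTTP read handles

    def stream_vmdk(url, total_bytes, report_progress, chunk_size=64 * 1024):
        """Read an exported disk from an HttpNfcLease URL, reporting progress.

        report_progress(percent) is a stand-in for HttpNfcLeaseProgress;
        total_bytes is the expected disk size taken from the lease info.
        """
        read = 0
        # verify=False matches a test lab with self-signed certs; verify in production
        with requests.get(url, stream=True, verify=False) as resp:
            resp.raise_for_status()
            for chunk in resp.iter_content(chunk_size=chunk_size):
                read += len(chunk)
                # ...hand the chunk to the image upload side here...
                report_progress(min(100, read * 100 // max(total_bytes, 1)))
        return read
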
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1404.244293] env[68569]: ERROR oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e75736-0981-0885-a804-fa4a03e49b44/disk-0.vmdk due to incomplete transfer. [ 1404.244498] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4ce67794-fe08-4065-ad92-00a8fdb274b2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.251422] env[68569]: DEBUG oslo_vmware.rw_handles [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e75736-0981-0885-a804-fa4a03e49b44/disk-0.vmdk. {{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1404.251611] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Uploaded image 09960703-30d2-4a2c-af9e-39377b6f7b30 to the Glance image server {{(pid=68569) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1404.253828] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Destroying the VM {{(pid=68569) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1404.254055] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-10e28af1-e163-449b-9792-109e771a5c80 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.259546] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1404.259546] env[68569]: value = "task-3168095" [ 1404.259546] env[68569]: _type = "Task" [ 1404.259546] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.266672] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168095, 'name': Destroy_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.769050] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168095, 'name': Destroy_Task} progress is 33%. 
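
Editor's note: when the read handle is closed, the lease is released according to its state and how much data was actually transferred; in the entries above the handle reports an incomplete transfer and aborts the lease even though the image upload to Glance completed. A simplified sketch of that release decision, with hypothetical abort_lease/complete_lease callables standing in for the HttpNfcLeaseAbort/HttpNfcLeaseComplete calls:

    def release_lease(lease_state, bytes_read, expected_bytes,
                      abort_lease, complete_lease):
        """Release an export lease when the read handle is closed.

        abort_lease() / complete_lease() are hypothetical callables standing
        in for HttpNfcLeaseAbort / HttpNfcLeaseComplete; only a lease that is
        still 'ready' needs to be released at all.
        """
        if lease_state != 'ready':
            return  # already completed or errored out on the vCenter side
        if bytes_read < expected_bytes:
            # Matches the log: "Aborting lease ... due to incomplete transfer."
            abort_lease()
        else:
            complete_lease()
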
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.269711] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168095, 'name': Destroy_Task, 'duration_secs': 0.587897} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.270064] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Destroyed the VM [ 1405.270272] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deleting Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1405.270503] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5ed88d0f-13a6-4e76-a182-6966f1b4155e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.276497] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1405.276497] env[68569]: value = "task-3168096" [ 1405.276497] env[68569]: _type = "Task" [ 1405.276497] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.283794] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168096, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.785966] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168096, 'name': RemoveSnapshot_Task, 'duration_secs': 0.38671} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.786271] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deleted Snapshot of the VM instance {{(pid=68569) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1405.786557] env[68569]: DEBUG nova.compute.manager [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.787310] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7ee9d2-9739-4855-a63e-acb7749c2121 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.299233] env[68569]: INFO nova.compute.manager [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Shelve offloading [ 1406.802868] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1406.803216] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0eaa5590-e6f9-4e37-988e-850fc9a8bbbe {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.810766] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1406.810766] env[68569]: value = "task-3168097" [ 1406.810766] env[68569]: _type = "Task" [ 1406.810766] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.818802] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168097, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.320896] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1407.321286] env[68569]: DEBUG nova.compute.manager [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1407.321880] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282a89c5-504f-4f65-917d-3df4ec34848f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.327471] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.327643] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1407.327798] env[68569]: DEBUG nova.network.neutron [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1408.019952] env[68569]: DEBUG nova.network.neutron [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", 
"ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.522804] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1408.717530] env[68569]: DEBUG nova.compute.manager [req-fc2917cd-efca-4ad1-b405-441813719925 req-df511fe4-c12b-4b00-acfe-35f724a4cff3 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-vif-unplugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1408.717746] env[68569]: DEBUG oslo_concurrency.lockutils [req-fc2917cd-efca-4ad1-b405-441813719925 req-df511fe4-c12b-4b00-acfe-35f724a4cff3 service nova] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.717956] env[68569]: DEBUG oslo_concurrency.lockutils [req-fc2917cd-efca-4ad1-b405-441813719925 req-df511fe4-c12b-4b00-acfe-35f724a4cff3 service nova] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1408.718143] env[68569]: DEBUG oslo_concurrency.lockutils [req-fc2917cd-efca-4ad1-b405-441813719925 req-df511fe4-c12b-4b00-acfe-35f724a4cff3 service nova] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1408.718356] env[68569]: DEBUG nova.compute.manager [req-fc2917cd-efca-4ad1-b405-441813719925 req-df511fe4-c12b-4b00-acfe-35f724a4cff3 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] No waiting events found dispatching network-vif-unplugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1408.718556] env[68569]: WARNING nova.compute.manager [req-fc2917cd-efca-4ad1-b405-441813719925 req-df511fe4-c12b-4b00-acfe-35f724a4cff3 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received unexpected event network-vif-unplugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1408.799691] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1408.800615] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9a7bf6-d606-4acd-8a38-effa0747cc60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.807921] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1408.808154] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff396d60-0baf-40ee-a9f9-753389de414e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.872797] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1408.873078] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1408.873238] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleting the datastore file [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1408.873498] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-259cf8ca-e580-4f60-906b-f53f8dd5c0d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.879522] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1408.879522] env[68569]: value = "task-3168099" [ 1408.879522] env[68569]: _type = "Task" [ 1408.879522] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.886864] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168099, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.389264] env[68569]: DEBUG oslo_vmware.api [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131192} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.389497] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1409.389672] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1409.389844] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1409.413165] env[68569]: INFO nova.scheduler.client.report [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted allocations for instance 34a807d6-1fdd-47d8-a72e-bf2345de321b [ 1409.917294] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1409.917613] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.917848] env[68569]: DEBUG nova.objects.instance [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'resources' on Instance uuid 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.419731] env[68569]: DEBUG nova.objects.instance [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'numa_topology' on Instance uuid 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.749338] env[68569]: DEBUG nova.compute.manager [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] 
[instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1410.749539] env[68569]: DEBUG nova.compute.manager [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing instance network info cache due to event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1410.749745] env[68569]: DEBUG oslo_concurrency.lockutils [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.749889] env[68569]: DEBUG oslo_concurrency.lockutils [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1410.750060] env[68569]: DEBUG nova.network.neutron [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.907741] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.921410] env[68569]: DEBUG nova.objects.base [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Object Instance<34a807d6-1fdd-47d8-a72e-bf2345de321b> lazy-loaded attributes: resources,numa_topology {{(pid=68569) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1410.950284] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fb6090-18e2-42e3-a9fa-3da7b3de4cbb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.957095] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81932fb-c952-4f31-8359-fd0798d3da0f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.987388] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2708c591-b35e-4995-8814-6136b3c16199 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.994705] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06890f4-afb4-4cb2-9886-28b25e2701c8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.008189] env[68569]: DEBUG 
nova.compute.provider_tree [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.433840] env[68569]: DEBUG nova.network.neutron [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updated VIF entry in instance network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1411.434199] env[68569]: DEBUG nova.network.neutron [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb83be58c-0a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.510811] env[68569]: DEBUG nova.scheduler.client.report [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1411.937277] env[68569]: DEBUG oslo_concurrency.lockutils [req-21f96e9d-4863-4011-baad-63a79e37ebe6 req-3ce0b6a5-fb26-415a-99d6-72cd5350b8bb service nova] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1412.015466] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.523123] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5eed756e-9734-4d50-928e-5dc06c92c638 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.286s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.524139] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.616s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.524255] env[68569]: INFO nova.compute.manager [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Unshelving [ 1413.545690] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.545968] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.546192] env[68569]: DEBUG nova.objects.instance [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'pci_requests' on Instance uuid 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1414.050193] env[68569]: DEBUG nova.objects.instance [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'numa_topology' on Instance uuid 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1414.552987] env[68569]: INFO nova.compute.claims [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.587015] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6757b265-336d-4a03-90df-177d80776916 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.594746] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ea57df66-ae03-46fa-bc88-1e49e31d6626 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.625320] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efbe1e4-0bde-431d-bb97-1e0b1f11dae2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.632108] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ba1c63-67dc-40c8-a5ed-856503a1191b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.647190] env[68569]: DEBUG nova.compute.provider_tree [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.150136] env[68569]: DEBUG nova.scheduler.client.report [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1416.655035] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.109s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1416.683593] env[68569]: INFO nova.network.neutron [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating port b83be58c-0aca-40bd-b217-035d5fadc9a1 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1418.050042] env[68569]: DEBUG nova.compute.manager [req-44791ab5-aef7-468e-ade1-cadd20814e26 req-2ffb18bc-600a-4f67-ada5-bbce79020777 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-vif-plugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1418.050288] env[68569]: DEBUG oslo_concurrency.lockutils [req-44791ab5-aef7-468e-ade1-cadd20814e26 req-2ffb18bc-600a-4f67-ada5-bbce79020777 service nova] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.050444] env[68569]: DEBUG oslo_concurrency.lockutils [req-44791ab5-aef7-468e-ade1-cadd20814e26 req-2ffb18bc-600a-4f67-ada5-bbce79020777 
service nova] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.050653] env[68569]: DEBUG oslo_concurrency.lockutils [req-44791ab5-aef7-468e-ade1-cadd20814e26 req-2ffb18bc-600a-4f67-ada5-bbce79020777 service nova] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.050826] env[68569]: DEBUG nova.compute.manager [req-44791ab5-aef7-468e-ade1-cadd20814e26 req-2ffb18bc-600a-4f67-ada5-bbce79020777 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] No waiting events found dispatching network-vif-plugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1418.051065] env[68569]: WARNING nova.compute.manager [req-44791ab5-aef7-468e-ade1-cadd20814e26 req-2ffb18bc-600a-4f67-ada5-bbce79020777 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received unexpected event network-vif-plugged-b83be58c-0aca-40bd-b217-035d5fadc9a1 for instance with vm_state shelved_offloaded and task_state spawning. [ 1418.119643] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.119819] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1418.120006] env[68569]: DEBUG nova.network.neutron [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.853310] env[68569]: DEBUG nova.network.neutron [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", "ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.356331] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1419.383233] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='00d6a2d526e6c44acf0840d2f6413582',container_format='bare',created_at=2025-03-26T05:04:01Z,direct_url=,disk_format='vmdk',id=09960703-30d2-4a2c-af9e-39377b6f7b30,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1140531519-shelved',owner='335f83fe0b8e42aa80e8f0691b609649',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-26T05:04:15Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1419.383480] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.383634] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1419.383814] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.383956] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1419.384116] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1419.384328] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1419.384486] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1419.384651] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1419.384813] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1419.384983] env[68569]: DEBUG nova.virt.hardware [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1419.385850] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6331eecc-c3cc-44a2-90f0-57d1f00fc023 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.393804] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ecf2ff-22bc-4fea-bde9-d25d2e9a68eb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.407138] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:2d:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b83be58c-0aca-40bd-b217-035d5fadc9a1', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1419.414536] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
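
Editor's note: the hardware.py entries above build the possible CPU topologies for the 1-vCPU m1.nano flavor under very large sockets/cores/threads limits and end up with the single VirtCPUTopology(cores=1,sockets=1,threads=1). A small illustrative enumeration of (sockets, cores, threads) triples whose product equals the vCPU count, under those limits; this is a sketch of the idea, not Nova's actual algorithm:

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) triples that exactly cover vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    # For the flavor above: 1 vCPU, no preference, limits of 65536 each.
    print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]
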
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1419.414760] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1419.414956] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78410d3e-c744-4788-97a9-45daf112eed9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.433374] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1419.433374] env[68569]: value = "task-3168100" [ 1419.433374] env[68569]: _type = "Task" [ 1419.433374] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.440161] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168100, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.944132] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168100, 'name': CreateVM_Task, 'duration_secs': 0.289701} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.944132] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.944132] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.944407] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1419.944665] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1419.944923] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7803db0b-8f80-428b-b2d4-5a8e1396edd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.949161] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1419.949161] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52991cb1-68ec-f02d-6d9c-3f0608e94c6d" [ 1419.949161] env[68569]: _type = "Task" [ 1419.949161] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.956541] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52991cb1-68ec-f02d-6d9c-3f0608e94c6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.074169] env[68569]: DEBUG nova.compute.manager [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1420.074367] env[68569]: DEBUG nova.compute.manager [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing instance network info cache due to event network-changed-b83be58c-0aca-40bd-b217-035d5fadc9a1. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1420.074577] env[68569]: DEBUG oslo_concurrency.lockutils [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] Acquiring lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.074741] env[68569]: DEBUG oslo_concurrency.lockutils [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] Acquired lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.074901] env[68569]: DEBUG nova.network.neutron [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Refreshing network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1420.459744] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1420.460116] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Processing image 09960703-30d2-4a2c-af9e-39377b6f7b30 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.460336] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} 
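
Editor's note: the entries above serialize access to the cached image by locking on the full datastore path of the cached VMDK ("[datastore2] devstack-image-cache_base/<image_id>/<image_id>.vmdk"). A minimal sketch of building that path and using it as a lock key; the cache folder name is taken from the log, and the per-path lock map is illustrative rather than oslo.concurrency's real machinery:

    import threading
    from collections import defaultdict

    _path_locks = defaultdict(threading.Lock)  # lock name -> lock, illustrative only

    def cached_image_path(datastore, image_id,
                          cache_folder='devstack-image-cache_base'):
        """Build the datastore path used both as cache location and lock name."""
        return f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"

    path = cached_image_path('datastore2', '09960703-30d2-4a2c-af9e-39377b6f7b30')
    with _path_locks[path]:
        # fetch or reuse the cached image while holding the per-path lock
        print(f'Acquired lock "{path}"')
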
[ 1420.460493] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.460703] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.460948] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d14b1ab4-fcd1-4ae6-983d-27619a759a61 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.477762] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.477935] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.478623] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4e37f6-3f97-4633-9cf5-9e0ef49d9a89 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.483310] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1420.483310] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ced5f-16a4-427f-78ab-36f534fcb019" [ 1420.483310] env[68569]: _type = "Task" [ 1420.483310] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.493118] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524ced5f-16a4-427f-78ab-36f534fcb019, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.760510] env[68569]: DEBUG nova.network.neutron [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updated VIF entry in instance network info cache for port b83be58c-0aca-40bd-b217-035d5fadc9a1. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1420.760911] env[68569]: DEBUG nova.network.neutron [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [{"id": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "address": "fa:16:3e:f3:2d:a1", "network": {"id": "0c866cf0-be8b-4d84-9890-46b96b191b09", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1599908679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "335f83fe0b8e42aa80e8f0691b609649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb83be58c-0a", "ovs_interfaceid": "b83be58c-0aca-40bd-b217-035d5fadc9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.993017] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Preparing fetch location {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1420.993246] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Fetch image to [datastore2] OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6/OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6.vmdk {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1420.993428] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Downloading stream optimized image 09960703-30d2-4a2c-af9e-39377b6f7b30 to [datastore2] OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6/OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6.vmdk on the data store datastore2 as vApp {{(pid=68569) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1420.993596] env[68569]: DEBUG nova.virt.vmwareapi.images [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Downloading image file data 09960703-30d2-4a2c-af9e-39377b6f7b30 to the ESX as VM named 'OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6' {{(pid=68569) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1421.059390] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1421.059390] env[68569]: value = "resgroup-9" [ 1421.059390] env[68569]: _type = "ResourcePool" [ 1421.059390] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1421.059698] env[68569]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-16977208-968f-44a5-ad59-fc12019320e4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.080169] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease: (returnval){ [ 1421.080169] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529cb4ba-e107-875e-8d21-1fffeadc4c78" [ 1421.080169] env[68569]: _type = "HttpNfcLease" [ 1421.080169] env[68569]: } obtained for vApp import into resource pool (val){ [ 1421.080169] env[68569]: value = "resgroup-9" [ 1421.080169] env[68569]: _type = "ResourcePool" [ 1421.080169] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1421.080464] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the lease: (returnval){ [ 1421.080464] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529cb4ba-e107-875e-8d21-1fffeadc4c78" [ 1421.080464] env[68569]: _type = "HttpNfcLease" [ 1421.080464] env[68569]: } to be ready. {{(pid=68569) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1421.086365] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1421.086365] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529cb4ba-e107-875e-8d21-1fffeadc4c78" [ 1421.086365] env[68569]: _type = "HttpNfcLease" [ 1421.086365] env[68569]: } is initializing. {{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1421.263884] env[68569]: DEBUG oslo_concurrency.lockutils [req-33c77e0a-cc10-4816-b1f7-9afd2406a351 req-c77e2c9e-f5de-4eb9-8f59-0d60931a1401 service nova] Releasing lock "refresh_cache-34a807d6-1fdd-47d8-a72e-bf2345de321b" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1421.588047] env[68569]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1421.588047] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529cb4ba-e107-875e-8d21-1fffeadc4c78" [ 1421.588047] env[68569]: _type = "HttpNfcLease" [ 1421.588047] env[68569]: } is ready. 
{{(pid=68569) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1421.588631] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1421.588631] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]529cb4ba-e107-875e-8d21-1fffeadc4c78" [ 1421.588631] env[68569]: _type = "HttpNfcLease" [ 1421.588631] env[68569]: }. {{(pid=68569) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1421.589047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee2f680-4713-4564-9acb-763438d03bd3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.595992] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9f03-8988-8973-90b2-7e4f24a65acc/disk-0.vmdk from lease info. {{(pid=68569) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1421.596181] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9f03-8988-8973-90b2-7e4f24a65acc/disk-0.vmdk. {{(pid=68569) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1421.659267] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ff32be76-9e10-4e56-be09-5374657c1af7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.772766] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Completed reading data from the image iterator. {{(pid=68569) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1422.773093] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9f03-8988-8973-90b2-7e4f24a65acc/disk-0.vmdk. 
{{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1422.774033] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408ceab6-383b-4c7c-972e-8e27c27ead4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.780884] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9f03-8988-8973-90b2-7e4f24a65acc/disk-0.vmdk is in state: ready. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1422.781100] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9f03-8988-8973-90b2-7e4f24a65acc/disk-0.vmdk. {{(pid=68569) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1422.781331] env[68569]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6ce445f0-40b6-49f2-b148-993e10e90e42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.968445] env[68569]: DEBUG oslo_vmware.rw_handles [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9f03-8988-8973-90b2-7e4f24a65acc/disk-0.vmdk. 
{{(pid=68569) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1422.968667] env[68569]: INFO nova.virt.vmwareapi.images [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Downloaded image file data 09960703-30d2-4a2c-af9e-39377b6f7b30 [ 1422.969606] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2388472-96d3-4a19-912c-35226df1212c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.984932] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e95294f6-b578-4618-807c-64c6394ac9e0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.010230] env[68569]: INFO nova.virt.vmwareapi.images [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] The imported VM was unregistered [ 1423.012675] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Caching image {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1423.012902] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Creating directory with path [datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1423.013170] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5777b921-734a-4331-bb21-c0d6dabf0886 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.034574] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Created directory with path [datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30 {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1423.034769] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6/OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6.vmdk to [datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk. 
{{(pid=68569) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1423.035054] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-695af4bc-7f4f-436a-b041-f26a658ac66c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.041344] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1423.041344] env[68569]: value = "task-3168103" [ 1423.041344] env[68569]: _type = "Task" [ 1423.041344] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.049429] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168103, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.553430] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168103, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.051810] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168103, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.553249] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168103, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.059699] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168103, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.556613] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168103, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.192157} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.556879] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6/OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6.vmdk to [datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk. 
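Annotator's note: the entries from "Preparing fetch location" through "Moved virtual disk" show the stream-optimized image being imported into a temporary OSTACK_IMG_* directory and then moved into the per-image cache folder; later entries copy it into the instance folder. The hedged helpers below only reproduce the datastore path layout visible in those messages; ds_path, cached_image_vmdk and instance_root_vmdk are illustrative names, not Nova helpers.

    # Hypothetical helpers (not Nova code) that reproduce the datastore path
    # layout visible in the messages above.
    def ds_path(datastore: str, *parts: str) -> str:
        """Format a VMware-style datastore path, e.g. '[datastore2] a/b.vmdk'."""
        return f"[{datastore}] " + "/".join(parts)

    def cached_image_vmdk(datastore: str, image_id: str) -> str:
        """Path of the cached image copy under devstack-image-cache_base."""
        return ds_path(datastore, "devstack-image-cache_base",
                       image_id, f"{image_id}.vmdk")

    def instance_root_vmdk(datastore: str, instance_uuid: str) -> str:
        """Path of the per-instance root disk copied from the cache."""
        return ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")

    if __name__ == "__main__":
        image = "09960703-30d2-4a2c-af9e-39377b6f7b30"
        instance = "34a807d6-1fdd-47d8-a72e-bf2345de321b"
        print(cached_image_vmdk("datastore2", image))
        print(instance_root_vmdk("datastore2", instance))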
[ 1425.557086] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Cleaning up location [datastore2] OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1425.557261] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_f4d649ac-7c20-492c-8b52-935510c331c6 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.557510] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e596c359-aa81-4186-8235-12721b976b69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.563661] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1425.563661] env[68569]: value = "task-3168104" [ 1425.563661] env[68569]: _type = "Task" [ 1425.563661] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.570588] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.074127] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035767} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.074624] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.074785] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1426.075106] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk to [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1426.075457] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21e358f3-962c-478f-be1b-930892dae9a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.081168] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1426.081168] env[68569]: value = "task-3168105" [ 1426.081168] env[68569]: _type = "Task" [ 1426.081168] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.088508] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168105, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.591828] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168105, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.597546] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.092808] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168105, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.593239] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168105, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.094598] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168105, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.595162] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168105, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.184484} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.595435] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/09960703-30d2-4a2c-af9e-39377b6f7b30/09960703-30d2-4a2c-af9e-39377b6f7b30.vmdk to [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1428.596236] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f828e34-e801-4b70-a0eb-77c865bd3a62 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.617582] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1428.617861] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7f20e43-3d86-4cc4-91ac-acc1ef792da1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.636266] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1428.636266] env[68569]: value = "task-3168106" [ 1428.636266] env[68569]: _type = "Task" [ 1428.636266] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.643385] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168106, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.147419] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168106, 'name': ReconfigVM_Task, 'duration_secs': 0.275526} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.147764] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b/34a807d6-1fdd-47d8-a72e-bf2345de321b.vmdk or device None with type streamOptimized {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1429.148282] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1d96ab2-ad1c-466a-b2f8-f29e34281eac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.154140] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1429.154140] env[68569]: value = "task-3168107" [ 1429.154140] env[68569]: _type = "Task" [ 1429.154140] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.161090] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168107, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.664535] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168107, 'name': Rename_Task, 'duration_secs': 0.166717} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.664839] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1429.665103] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95f341a5-ce9b-4740-80c3-0fbed95c6749 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.671307] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1429.671307] env[68569]: value = "task-3168108" [ 1429.671307] env[68569]: _type = "Task" [ 1429.671307] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.678983] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168108, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.181597] env[68569]: DEBUG oslo_vmware.api [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168108, 'name': PowerOnVM_Task, 'duration_secs': 0.400947} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.182347] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1430.271194] env[68569]: DEBUG nova.compute.manager [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1430.272109] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34843dac-926f-4a12-b32d-e4290a4a99c5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.788144] env[68569]: DEBUG oslo_concurrency.lockutils [None req-241e8fb0-78a4-4cbd-9811-96feb05e96f8 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.264s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.578672] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.579192] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.579353] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.579555] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.579728] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 
tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.581850] env[68569]: INFO nova.compute.manager [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Terminating instance [ 1432.085589] env[68569]: DEBUG nova.compute.manager [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1432.085813] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1432.086732] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41215b27-29df-4a29-9348-0ff6a24f8553 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.094878] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1432.095110] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3faf0d2-bd75-4577-b010-aea5be148e9d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.101512] env[68569]: DEBUG oslo_vmware.api [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1432.101512] env[68569]: value = "task-3168109" [ 1432.101512] env[68569]: _type = "Task" [ 1432.101512] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.108875] env[68569]: DEBUG oslo_vmware.api [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.611387] env[68569]: DEBUG oslo_vmware.api [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168109, 'name': PowerOffVM_Task, 'duration_secs': 0.174068} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.611752] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1432.611752] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1432.611974] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cabb8287-a6b3-46cd-88a4-c6af360f6865 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.673588] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1432.673849] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1432.673989] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleting the datastore file [datastore2] 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1432.674309] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e030ee2-be21-4b39-b8e3-9d7f442b3027 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.681140] env[68569]: DEBUG oslo_vmware.api [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for the task: (returnval){ [ 1432.681140] env[68569]: value = "task-3168111" [ 1432.681140] env[68569]: _type = "Task" [ 1432.681140] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.688307] env[68569]: DEBUG oslo_vmware.api [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168111, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.095384] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.095634] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.095770] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.095952] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.096138] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Cleaning up deleted instances with incomplete migration {{(pid=68569) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11879}} [ 1433.192011] env[68569]: DEBUG oslo_vmware.api [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Task: {'id': task-3168111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119586} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.192272] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1433.192453] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1433.192619] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1433.192782] env[68569]: INFO nova.compute.manager [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Took 1.11 seconds to destroy the instance on the hypervisor. 
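Annotator's note: the terminate path above holds a per-instance lock for the whole do_terminate_instance call and briefly takes a "<uuid>-events" lock while clearing queued external events, before powering off, unregistering the VM and deleting its datastore files. The sketch below shows only that nested-lock shape using oslo.concurrency's lock context manager; terminate_instance and the placeholder bodies are hypothetical, not Nova's code.

    # Minimal sketch of the nested lock pattern seen above; requires
    # oslo.concurrency. Function names and placeholder bodies are hypothetical.
    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid: str) -> None:
        with lockutils.lock(instance_uuid):                  # serialize per-instance operations
            with lockutils.lock(f"{instance_uuid}-events"):  # briefly guard clearing queued events
                pass  # clear pending external events for the instance
            pass  # power off, unregister the VM, delete datastore files, deallocate network

    if __name__ == "__main__":
        terminate_instance("34a807d6-1fdd-47d8-a72e-bf2345de321b")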
[ 1433.193017] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1433.193208] env[68569]: DEBUG nova.compute.manager [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1433.193300] env[68569]: DEBUG nova.network.neutron [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1433.591551] env[68569]: DEBUG nova.compute.manager [req-b2300ae7-566b-4d5a-b2db-7c1604a4a4af req-8d6ac5eb-7e07-472e-ab77-7b6b5e848d4f service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Received event network-vif-deleted-b83be58c-0aca-40bd-b217-035d5fadc9a1 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1433.591551] env[68569]: INFO nova.compute.manager [req-b2300ae7-566b-4d5a-b2db-7c1604a4a4af req-8d6ac5eb-7e07-472e-ab77-7b6b5e848d4f service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Neutron deleted interface b83be58c-0aca-40bd-b217-035d5fadc9a1; detaching it from the instance and deleting it from the info cache [ 1433.591551] env[68569]: DEBUG nova.network.neutron [req-b2300ae7-566b-4d5a-b2db-7c1604a4a4af req-8d6ac5eb-7e07-472e-ab77-7b6b5e848d4f service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.070788] env[68569]: DEBUG nova.network.neutron [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.094321] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7398e433-14db-448b-a8bf-6684f7b28003 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.099454] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.104152] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4c6e88-29fb-4856-9551-f33ba5c17954 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.127984] env[68569]: DEBUG nova.compute.manager [req-b2300ae7-566b-4d5a-b2db-7c1604a4a4af req-8d6ac5eb-7e07-472e-ab77-7b6b5e848d4f service nova] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Detach interface failed, port_id=b83be58c-0aca-40bd-b217-035d5fadc9a1, reason: Instance 34a807d6-1fdd-47d8-a72e-bf2345de321b could not be found. 
{{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1434.574091] env[68569]: INFO nova.compute.manager [-] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Took 1.38 seconds to deallocate network for instance. [ 1434.596825] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.079896] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.080235] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.080429] env[68569]: DEBUG nova.objects.instance [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lazy-loading 'resources' on Instance uuid 34a807d6-1fdd-47d8-a72e-bf2345de321b {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1435.597364] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.682350] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0877662f-54cc-4172-a0c2-1f6089992476 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.690047] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd30eb2-f61c-4204-8f29-6cbb736855da {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.720484] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47df0e1-682b-4e7d-8219-bc06ecbe158a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.727360] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5865f9fe-304a-4333-a11b-0ae8d714611a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.739952] env[68569]: DEBUG nova.compute.provider_tree [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.243425] env[68569]: 
DEBUG nova.scheduler.client.report [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1436.749045] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.769698] env[68569]: INFO nova.scheduler.client.report [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Deleted allocations for instance 34a807d6-1fdd-47d8-a72e-bf2345de321b [ 1437.277044] env[68569]: DEBUG oslo_concurrency.lockutils [None req-32e61fa0-49ba-419d-91ed-abc92940fa45 tempest-ServerActionsTestOtherB-311505698 tempest-ServerActionsTestOtherB-311505698-project-member] Lock "34a807d6-1fdd-47d8-a72e-bf2345de321b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.697s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1437.596843] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.100135] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.100394] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.100532] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.100691] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1438.101639] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0618350a-9954-449f-8ca9-6da6bd7b824f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.109338] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ac453f-71c8-43bb-8858-061ec5f96b7e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.122708] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980d64e2-a931-4b70-8313-bf44047f6ea6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.129235] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4793ea2b-e954-4da2-938e-e871115b2c56 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.157871] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181021MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1438.158029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.158227] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1439.176882] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1439.177152] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1439.189325] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fc42d8-e910-497d-b81f-e2bbb4ae9938 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.196909] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7022eb6a-6f1a-4cf4-974c-2ecefccd66c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.225203] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0121a1ec-1078-47fd-bc67-974ed351cd5a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.231640] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218dcd58-918c-40bf-8318-89dbd0089f51 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.243915] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.747328] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1440.253929] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1440.254342] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.096s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.255377] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.255702] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1442.597616] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.597893] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Cleaning up deleted instances {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11841}} [ 1443.106959] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] There are 25 instances to clean {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11850}} [ 1443.107166] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 34a807d6-1fdd-47d8-a72e-bf2345de321b] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1443.611510] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: d593a549-45d3-4ae2-a4ac-96e945a762e3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1444.115240] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 612b436d-b0ac-4bda-8248-9b354201fde0] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1444.618570] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 87acc843-9940-4887-81ff-3fba98343340] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1445.122520] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 1055b531-4bca-4245-ab61-698c21b9e484] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1445.433954] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1445.434518] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.625583] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: d57645fc-82d5-4ae8-93c3-0de095a66649] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1445.733034] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1445.733293] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.937385] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Starting instance... {{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1446.129458] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 9f1a51d3-8b34-4a0c-9fa3-21eac0c550b6] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1446.236159] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Starting instance... 
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1446.459990] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1446.460304] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1446.461863] env[68569]: INFO nova.compute.claims [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1446.632989] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: e4fc902a-05c1-419c-9019-c22fa0f9ae9d] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1446.759038] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.136925] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: c1b3fe48-aaf4-425a-a97c-e3c9a070db8b] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1447.506368] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e2a633-d5cc-4d86-84bb-3b82bade057e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.514120] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1041d1-c61f-4340-b2e7-a1162e0c6e8d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.543094] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3514e198-fd40-4519-a688-aac6d9fa8648 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.549831] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6eb5db-c8b8-4822-9efe-0aeddb08d08e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.563310] env[68569]: DEBUG nova.compute.provider_tree [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 
tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.639992] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 693d59a2-f8f5-4f63-af55-192b0c458ddf] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1448.066196] env[68569]: DEBUG nova.scheduler.client.report [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1448.142672] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 7289fe12-4f1f-488f-9be2-a7cb666727b3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1448.570688] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1448.571316] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1448.574061] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.815s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1448.575492] env[68569]: INFO nova.compute.claims [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.645857] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6317f756-c9ed-4858-bb2a-c20d9f82f90d] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1449.079604] env[68569]: DEBUG nova.compute.utils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1449.082709] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1449.082906] env[68569]: DEBUG nova.network.neutron [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1449.118578] env[68569]: DEBUG nova.policy [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550de9c12d9d45349678d30dd3e61568', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e99ea1e4c887441d9fe1204996bff8fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1449.148341] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 47fa6c6a-a31f-4eea-86b0-807dba6a6b4d] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1449.341490] env[68569]: DEBUG nova.network.neutron [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Successfully created port: 
20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1449.589228] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1449.629717] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac782be6-c23f-4cb3-be5c-e9ead834c890 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.637613] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0afb3ab-54f4-4493-9d8a-2f178417a1c6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.668653] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 3363dac8-c5df-405e-8bdc-9002e2d45e05] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1449.671213] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a281ca-4fa8-4fa0-bc02-b29076ecefe2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.679493] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4235e9-aa54-4ef7-89fa-0f7e145dc7ae {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.694391] env[68569]: DEBUG nova.compute.provider_tree [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.175625] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 09f4018b-f1cd-4726-b871-b110a7cf1b43] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1450.196996] env[68569]: DEBUG nova.scheduler.client.report [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1450.598798] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: 
e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1450.631819] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1450.632017] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1450.632148] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1450.632815] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1450.632815] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1450.632815] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1450.633135] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1450.633368] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1450.633612] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1450.633837] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1450.634066] env[68569]: DEBUG nova.virt.hardware [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1450.635030] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d564de4-03f9-497c-b34c-1727e3af59ff {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.644344] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d92e930-3de9-4e2e-ad0d-d878a8800827 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.678811] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 5de9a459-a2a2-4d78-9a66-cf819e8893b6] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1450.701966] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.127s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1450.702825] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Start building networks asynchronously for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1450.756122] env[68569]: DEBUG nova.compute.manager [req-47169b4c-947e-4d16-93c4-e313a06aba20 req-6909701e-7589-4e6b-866e-431c128c1a69 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Received event network-vif-plugged-20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1450.756372] env[68569]: DEBUG oslo_concurrency.lockutils [req-47169b4c-947e-4d16-93c4-e313a06aba20 req-6909701e-7589-4e6b-866e-431c128c1a69 service nova] Acquiring lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1450.756599] env[68569]: DEBUG oslo_concurrency.lockutils [req-47169b4c-947e-4d16-93c4-e313a06aba20 req-6909701e-7589-4e6b-866e-431c128c1a69 service nova] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1450.756767] env[68569]: DEBUG oslo_concurrency.lockutils [req-47169b4c-947e-4d16-93c4-e313a06aba20 req-6909701e-7589-4e6b-866e-431c128c1a69 service nova] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1450.756933] env[68569]: DEBUG nova.compute.manager [req-47169b4c-947e-4d16-93c4-e313a06aba20 req-6909701e-7589-4e6b-866e-431c128c1a69 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] No waiting events found dispatching network-vif-plugged-20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1450.757124] env[68569]: WARNING nova.compute.manager [req-47169b4c-947e-4d16-93c4-e313a06aba20 req-6909701e-7589-4e6b-866e-431c128c1a69 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Received unexpected event network-vif-plugged-20033b81-8427-4557-b3d1-160c08d94511 for instance with vm_state building and task_state spawning. 
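Annotation: the recurring "Acquiring lock ... by ..." / "Lock ... acquired ... :: waited" / "Lock ... 'released' ... :: held" triplets in the entries above and below (the lockutils.py:405/410/424 trailers) are emitted by oslo.concurrency's lockutils wrapper around the decorated callable. A minimal sketch of that pattern, with hypothetical function and lock names rather than Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage(context, instance):
        # Runs only while the named in-process semaphore is held; the wrapper
        # logs how long the caller waited for the lock and how long it held it.
        ...

    # Context-manager form, as used for the per-instance cache locks seen in this log:
    with lockutils.lock("refresh_cache-<instance-uuid>"):
        ...  # rebuild the instance network info cache while holding the lock

With the default external=False these are per-process semaphores, so they serialize work only within this nova-compute process.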
[ 1450.834994] env[68569]: DEBUG nova.network.neutron [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Successfully updated port: 20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1451.181510] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fdcdd4b5-82bd-43c9-8865-807f86789a99] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1451.206739] env[68569]: DEBUG nova.compute.utils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1451.207925] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1451.208100] env[68569]: DEBUG nova.network.neutron [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1451.243498] env[68569]: DEBUG nova.policy [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550de9c12d9d45349678d30dd3e61568', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e99ea1e4c887441d9fe1204996bff8fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1451.337650] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.337811] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1451.337950] env[68569]: DEBUG nova.network.neutron [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] 
Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1451.477365] env[68569]: DEBUG nova.network.neutron [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Successfully created port: d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1451.686724] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: fca05228-2a17-4a7e-94a0-449ba74a8933] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1451.713020] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Start building block device mappings for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1451.869117] env[68569]: DEBUG nova.network.neutron [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1451.985659] env[68569]: DEBUG nova.network.neutron [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updating instance_info_cache with network_info: [{"id": "20033b81-8427-4557-b3d1-160c08d94511", "address": "fa:16:3e:19:af:7c", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20033b81-84", "ovs_interfaceid": "20033b81-8427-4557-b3d1-160c08d94511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.189975] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 6dbe8a18-c317-4b36-bd6f-922ce9f85b6a] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1452.488630] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1452.488911] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Instance network_info: |[{"id": "20033b81-8427-4557-b3d1-160c08d94511", "address": "fa:16:3e:19:af:7c", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20033b81-84", "ovs_interfaceid": "20033b81-8427-4557-b3d1-160c08d94511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1452.489373] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:af:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20033b81-8427-4557-b3d1-160c08d94511', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1452.497205] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating folder: Project (e99ea1e4c887441d9fe1204996bff8fb). Parent ref: group-v633430. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1452.497462] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c6ce503-fe6e-4362-aee6-1cbcb1b0e740 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.508877] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created folder: Project (e99ea1e4c887441d9fe1204996bff8fb) in parent group-v633430. 
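Annotation: the Folder.CreateFolder and Folder.CreateVM_Task invocations here, followed by the "Waiting for the task ... progress is 0% ... completed successfully" exchange, reflect oslo.vmware's invoke_api/wait_for_task usage. A minimal sketch under that assumption (the helper name and argument names are illustrative, not Nova's vm_util):

    # Assumes `session` is an existing oslo_vmware.api.VMwareAPISession and that
    # folder_ref, config_spec and res_pool_ref were built elsewhere.
    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        task = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        # wait_for_task polls Task.info at the configured interval (the
        # "progress is ..." DEBUG lines) and raises if the task ends in error.
        task_info = session.wait_for_task(task)
        return task_info.result  # ManagedObjectReference of the new VM

The opID=oslo.vmware-<uuid> values attached to each request are per-call operation IDs that oslo.vmware adds so individual SOAP requests can be correlated on the vCenter side.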
[ 1452.509058] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating folder: Instances. Parent ref: group-v633775. {{(pid=68569) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1452.509293] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c68acaf-001e-460a-91cc-815326e3119a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.518492] env[68569]: INFO nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created folder: Instances in parent group-v633775. [ 1452.518709] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1452.518889] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1452.519108] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fedeab28-fd05-4bd6-9d9e-3f764f6928bc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.537889] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1452.537889] env[68569]: value = "task-3168115" [ 1452.537889] env[68569]: _type = "Task" [ 1452.537889] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.546208] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168115, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.693017] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: ee188712-b0e0-44ee-80b4-be72da32299f] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1452.720760] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Start spawning the instance on the hypervisor. 
{{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1452.748889] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1452.749188] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1452.749369] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1452.749572] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1452.749730] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1452.749883] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1452.750116] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1452.750293] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1452.750479] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1452.750643] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1452.750827] env[68569]: DEBUG nova.virt.hardware [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1452.752104] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab42312-b065-45b1-a2a8-5c1d5ef1daaa {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.760383] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22caa68-a038-44aa-a763-3ec1eb7ad80f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.782621] env[68569]: DEBUG nova.compute.manager [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Received event network-changed-20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1452.782825] env[68569]: DEBUG nova.compute.manager [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Refreshing instance network info cache due to event network-changed-20033b81-8427-4557-b3d1-160c08d94511. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1452.783054] env[68569]: DEBUG oslo_concurrency.lockutils [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] Acquiring lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.783216] env[68569]: DEBUG oslo_concurrency.lockutils [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] Acquired lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1452.783378] env[68569]: DEBUG nova.network.neutron [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Refreshing network info cache for port 20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1453.048521] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168115, 'name': CreateVM_Task, 'duration_secs': 0.302402} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.048763] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1453.049322] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.049493] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1453.049821] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1453.050084] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a303d863-f14a-494c-82d5-cf0112b94d3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.054987] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1453.054987] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52297b0e-33bf-0be8-8821-ea30d5d46895" [ 1453.054987] env[68569]: _type = "Task" [ 1453.054987] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.063194] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52297b0e-33bf-0be8-8821-ea30d5d46895, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.174136] env[68569]: DEBUG nova.network.neutron [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Successfully updated port: d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1453.196255] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a71f5626-bf27-44f6-8e03-9bc8a4e8a5c3] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1453.458278] env[68569]: DEBUG nova.network.neutron [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updated VIF entry in instance network info cache for port 20033b81-8427-4557-b3d1-160c08d94511. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1453.458671] env[68569]: DEBUG nova.network.neutron [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updating instance_info_cache with network_info: [{"id": "20033b81-8427-4557-b3d1-160c08d94511", "address": "fa:16:3e:19:af:7c", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20033b81-84", "ovs_interfaceid": "20033b81-8427-4557-b3d1-160c08d94511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.564670] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52297b0e-33bf-0be8-8821-ea30d5d46895, 'name': SearchDatastore_Task, 'duration_secs': 0.011249} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.564911] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1453.565153] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1453.565386] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.565532] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1453.565708] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1453.565962] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fd9234e-bdb7-43d4-ab8f-6207814b5e34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.573925] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1453.574107] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1453.574758] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26355501-399b-45fb-8ff8-fd52d7c6405b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.579809] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1453.579809] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52427b5f-7a85-0ad2-5029-5ffc08a377a9" [ 1453.579809] env[68569]: _type = "Task" [ 1453.579809] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.586626] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52427b5f-7a85-0ad2-5029-5ffc08a377a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.676173] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.676368] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1453.676468] env[68569]: DEBUG nova.network.neutron [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1453.698947] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: f5f8b054-7ee4-40f5-84de-1cee02949cd2] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1453.961961] env[68569]: DEBUG oslo_concurrency.lockutils [req-5f59223a-9b84-4d48-8b6d-7fe280cc84b3 req-c7f084c5-5cba-4809-a717-5887fd2a8fad service nova] Releasing lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1454.091317] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52427b5f-7a85-0ad2-5029-5ffc08a377a9, 'name': SearchDatastore_Task, 'duration_secs': 0.008363} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.092154] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09671f2b-ae46-43be-84ac-5e643c912e6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.098235] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1454.098235] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52460d96-0668-e861-3607-22d8612d87c1" [ 1454.098235] env[68569]: _type = "Task" [ 1454.098235] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.107243] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52460d96-0668-e861-3607-22d8612d87c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.202285] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: db75de86-9dda-42b2-9e7a-55e2ba5adad1] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1454.205653] env[68569]: DEBUG nova.network.neutron [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Instance cache missing network info. 
{{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1454.315755] env[68569]: DEBUG nova.network.neutron [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Updating instance_info_cache with network_info: [{"id": "d2756c2c-59f0-4d39-a132-7170724fdf05", "address": "fa:16:3e:b5:d6:91", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2756c2c-59", "ovs_interfaceid": "d2756c2c-59f0-4d39-a132-7170724fdf05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.608758] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52460d96-0668-e861-3607-22d8612d87c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009529} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.609028] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1454.609292] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/e8ba2946-f1d1-4b1f-b915-ba80ac5f087f.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1454.609530] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-964f2016-ba58-4519-9cfa-f4efe5daaa37 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.615864] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1454.615864] env[68569]: value = "task-3168116" [ 1454.615864] env[68569]: _type = "Task" [ 1454.615864] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.623979] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168116, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.705681] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: 61aa0997-ffa6-4551-bdaa-132026e240f9] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1454.813423] env[68569]: DEBUG nova.compute.manager [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Received event network-vif-plugged-d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1454.813590] env[68569]: DEBUG oslo_concurrency.lockutils [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] Acquiring lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.813942] env[68569]: DEBUG oslo_concurrency.lockutils [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1454.813942] env[68569]: DEBUG oslo_concurrency.lockutils [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1454.814114] env[68569]: DEBUG nova.compute.manager [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] No waiting events found dispatching network-vif-plugged-d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1454.814282] env[68569]: WARNING nova.compute.manager [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Received unexpected event network-vif-plugged-d2756c2c-59f0-4d39-a132-7170724fdf05 for instance with vm_state building and task_state spawning. [ 1454.814439] env[68569]: DEBUG nova.compute.manager [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Received event network-changed-d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1454.814586] env[68569]: DEBUG nova.compute.manager [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Refreshing instance network info cache due to event network-changed-d2756c2c-59f0-4d39-a132-7170724fdf05. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1454.814746] env[68569]: DEBUG oslo_concurrency.lockutils [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] Acquiring lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.818039] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1454.818379] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Instance network_info: |[{"id": "d2756c2c-59f0-4d39-a132-7170724fdf05", "address": "fa:16:3e:b5:d6:91", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2756c2c-59", "ovs_interfaceid": "d2756c2c-59f0-4d39-a132-7170724fdf05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1454.818876] env[68569]: DEBUG oslo_concurrency.lockutils [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] Acquired lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1454.819116] env[68569]: DEBUG nova.network.neutron [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Refreshing network info cache for port d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1454.820280] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:d6:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'd2756c2c-59f0-4d39-a132-7170724fdf05', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1454.828085] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1454.829362] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1454.829596] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f78f2d9a-09df-492d-b176-edd003caf142 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.851424] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1454.851424] env[68569]: value = "task-3168117" [ 1454.851424] env[68569]: _type = "Task" [ 1454.851424] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.861259] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168117, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.126192] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435435} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.126456] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/e8ba2946-f1d1-4b1f-b915-ba80ac5f087f.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1455.126651] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1455.126895] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-515c218a-5bb8-49ff-81f6-a42d7cca1230 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.132654] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1455.132654] env[68569]: value = "task-3168118" [ 1455.132654] env[68569]: _type = "Task" [ 1455.132654] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.141516] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168118, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.209261] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] [instance: a9e87dfc-6e00-4e55-8a8f-bc3174b991da] Instance has had 0 of 5 cleanup attempts {{(pid=68569) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11854}} [ 1455.362498] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168117, 'name': CreateVM_Task, 'duration_secs': 0.426225} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.362664] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1455.363317] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.363479] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1455.363785] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1455.364297] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-010d0747-6167-4a28-806a-c63e49d05e8a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.368382] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1455.368382] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a5839b-549e-abc5-ca3c-168c8dae84db" [ 1455.368382] env[68569]: _type = "Task" [ 1455.368382] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.375583] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a5839b-549e-abc5-ca3c-168c8dae84db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.509838] env[68569]: DEBUG nova.network.neutron [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Updated VIF entry in instance network info cache for port d2756c2c-59f0-4d39-a132-7170724fdf05. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1455.510198] env[68569]: DEBUG nova.network.neutron [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Updating instance_info_cache with network_info: [{"id": "d2756c2c-59f0-4d39-a132-7170724fdf05", "address": "fa:16:3e:b5:d6:91", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2756c2c-59", "ovs_interfaceid": "d2756c2c-59f0-4d39-a132-7170724fdf05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.642365] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168118, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065025} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.642621] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1455.643470] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a6cb17-f498-4cc3-a899-1cd6af39fcbf {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.665050] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/e8ba2946-f1d1-4b1f-b915-ba80ac5f087f.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1455.665236] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf60eaff-6ea0-475f-b215-97437f4c8b35 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.683749] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1455.683749] env[68569]: value = "task-3168119" [ 1455.683749] env[68569]: _type = "Task" [ 1455.683749] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.692232] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168119, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.879228] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a5839b-549e-abc5-ca3c-168c8dae84db, 'name': SearchDatastore_Task, 'duration_secs': 0.009681} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.879621] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1455.879850] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1455.880156] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.880357] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1455.880595] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1455.880893] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7fa1b23f-23d0-40f6-b953-5062b6ce8da0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.888954] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1455.889191] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1455.889973] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5359c605-68f8-4676-93e1-d2941bbb4c28 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.895545] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1455.895545] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52870657-467b-1135-030f-62a80d65c3ad" [ 1455.895545] env[68569]: _type = "Task" [ 1455.895545] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.902844] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52870657-467b-1135-030f-62a80d65c3ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.012822] env[68569]: DEBUG oslo_concurrency.lockutils [req-c9e5d8da-1ffb-4898-aff2-7e946728ec39 req-b40237fd-8ba0-4386-8ee5-465b05dee23c service nova] Releasing lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1456.195316] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168119, 'name': ReconfigVM_Task, 'duration_secs': 0.266011} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.195540] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Reconfigured VM instance instance-0000007d to attach disk [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/e8ba2946-f1d1-4b1f-b915-ba80ac5f087f.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1456.196170] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1a535a9-e943-4f54-a66e-4d2df079452e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.201960] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1456.201960] env[68569]: value = "task-3168120" [ 1456.201960] env[68569]: _type = "Task" [ 1456.201960] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.209210] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168120, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.405698] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52870657-467b-1135-030f-62a80d65c3ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009955} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.406458] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-273e3485-d68f-49ab-8f21-432d7b6682b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.411108] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1456.411108] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fbdc1d-7bcc-1738-99ca-64b01f5a6e91" [ 1456.411108] env[68569]: _type = "Task" [ 1456.411108] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.417949] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fbdc1d-7bcc-1738-99ca-64b01f5a6e91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.711766] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168120, 'name': Rename_Task, 'duration_secs': 0.138183} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.712035] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1456.712281] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f29a9f7-7329-4df6-9cb7-984e555c6ed4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.718223] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1456.718223] env[68569]: value = "task-3168121" [ 1456.718223] env[68569]: _type = "Task" [ 1456.718223] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.725086] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.921583] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52fbdc1d-7bcc-1738-99ca-64b01f5a6e91, 'name': SearchDatastore_Task, 'duration_secs': 0.010979} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.921996] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1456.922178] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/fad4a057-8e5d-40c9-96ec-815910eb2dcd.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1456.922461] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a764b011-a1a5-4370-aa7e-d574d17fc98b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.928882] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1456.928882] env[68569]: value = "task-3168122" [ 1456.928882] env[68569]: _type = "Task" [ 1456.928882] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.936484] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.228295] env[68569]: DEBUG oslo_vmware.api [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168121, 'name': PowerOnVM_Task, 'duration_secs': 0.431875} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.228679] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1457.228922] env[68569]: INFO nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Took 6.63 seconds to spawn the instance on the hypervisor. 
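The records above trace a complete spawn of instance e8ba2946-f1d1-4b1f-b915-ba80ac5f087f from the cached image: SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task (disk attach), Rename_Task and PowerOnVM_Task, each submitted to vCenter and then polled until it reports success. The loop behind the repeated "Waiting for the task ... progress is 0% ... completed successfully" records is oslo.vmware's task wait; a minimal sketch of that poll-until-done pattern, with an illustrative poll callable standing in for the real session plumbing, looks roughly like this:

import time

class TaskFailed(Exception):
    """Illustrative error for a vCenter task that ends in the 'error' state."""

def wait_for_task(poll, interval=0.5):
    # `poll` is any callable returning a vim.TaskInfo-like object with
    # .state ('queued'/'running'/'success'/'error'), .progress and .error.
    # This is a simplified stand-in for
    # oslo_vmware.api.VMwareAPISession.wait_for_task(), not its real code.
    while True:
        info = poll()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(getattr(info.error, 'localizedMessage', info.error))
        # Still 'queued' or 'running': this is the point at which the driver
        # logs "Task: {...} progress is N%" before sleeping and retrying.
        time.sleep(interval)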
[ 1457.229128] env[68569]: DEBUG nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1457.230021] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b63b575-2ac9-4105-b54e-0d884946ba0b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.438675] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446661} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.438943] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/fad4a057-8e5d-40c9-96ec-815910eb2dcd.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1457.439133] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1457.439404] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b922c02c-ba10-4948-9f62-bc728023da27 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.445113] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1457.445113] env[68569]: value = "task-3168123" [ 1457.445113] env[68569]: _type = "Task" [ 1457.445113] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.452781] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168123, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.749907] env[68569]: INFO nova.compute.manager [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Took 11.31 seconds to build instance. 
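Each completed task record above carries a 'duration_secs' field, which is what the "Took 6.63 seconds to spawn" / "Took 11.31 seconds to build instance" summaries ultimately reflect. To break such a build time down by task type from the raw log, a short script like the following is enough; the regex is tailored to the exact record format shown here, and the log path in the usage comment is only an example:

import re
from collections import defaultdict

# Matches the completed-task records above, e.g.
# "'name': CopyVirtualDisk_Task, 'duration_secs': 0.435435"
TASK_RE = re.compile(r"'name': (\w+), 'duration_secs': ([0-9.]+)")

def task_durations(log_lines):
    """Sum completed vCenter task durations per task type from nova-compute log text."""
    totals = defaultdict(float)
    for line in log_lines:
        for name, secs in TASK_RE.findall(line):
            totals[name] += float(secs)
    return dict(totals)

# Example usage (path is illustrative):
#   with open('nova-compute.log') as f:
#       print(task_durations(f))
# prints a dict like {'CopyVirtualDisk_Task': ..., 'PowerOnVM_Task': ..., ...},
# with durations summed across all instances in the log.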
[ 1457.956412] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168123, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.053387} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.956828] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1457.957425] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880fb655-3873-4aae-94dc-0608ad2a7d38 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.980240] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/fad4a057-8e5d-40c9-96ec-815910eb2dcd.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1457.980507] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e41efddb-745d-4173-b5c6-ee466bb47b53 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.998887] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1457.998887] env[68569]: value = "task-3168124" [ 1457.998887] env[68569]: _type = "Task" [ 1457.998887] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.010171] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168124, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.252768] env[68569]: DEBUG oslo_concurrency.lockutils [None req-12c0cad3-f0d6-4a40-ac5d-a4505fa7f033 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.818s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1458.508928] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168124, 'name': ReconfigVM_Task, 'duration_secs': 0.313322} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.509220] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Reconfigured VM instance instance-0000007e to attach disk [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/fad4a057-8e5d-40c9-96ec-815910eb2dcd.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.509887] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a00c5e0b-b946-47a3-a30a-8abf857f5a7a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.515957] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1458.515957] env[68569]: value = "task-3168125" [ 1458.515957] env[68569]: _type = "Task" [ 1458.515957] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.523278] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168125, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.025372] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168125, 'name': Rename_Task, 'duration_secs': 0.134316} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.025711] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1459.025853] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ae423d2-690a-40ca-98f2-5607c78c0dbc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.031787] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1459.031787] env[68569]: value = "task-3168126" [ 1459.031787] env[68569]: _type = "Task" [ 1459.031787] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.038982] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168126, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.541800] env[68569]: DEBUG oslo_vmware.api [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168126, 'name': PowerOnVM_Task, 'duration_secs': 0.491111} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.542727] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1459.542727] env[68569]: INFO nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Took 6.82 seconds to spawn the instance on the hypervisor. [ 1459.542903] env[68569]: DEBUG nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1459.543652] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eea33fc-475b-4e94-9078-a9e7ca84d29f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.065117] env[68569]: INFO nova.compute.manager [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Took 13.32 seconds to build instance. 
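The serialization visible throughout this section, the instance-UUID lock held 12.818 s (and 14.834 s below) around _locked_do_build_and_run_instance, the refresh_cache-<uuid> locks around network-info updates, and the datastore image-cache locks around the shared VMDK, is oslo.concurrency's lockutils; its "Acquiring lock / Acquired lock / Releasing lock" and "acquired ... waited / released ... held" messages are exactly what is being logged. A hedged sketch of the context-manager form of that pattern (the lock name template and the refresh callable are placeholders, not Nova's actual code):

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, refresh):
    # One "Acquiring lock ... Acquired lock ... Releasing lock" triplet as in the
    # records above: only one thread refreshes a given instance's network info
    # cache at a time; any other caller blocks on the same named lock.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return refresh(instance_uuid)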
[ 1460.567926] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5e84644e-8226-4e1a-be60-8607519f0316 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.834s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1461.009474] env[68569]: INFO nova.compute.manager [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Rescuing [ 1461.009707] env[68569]: DEBUG oslo_concurrency.lockutils [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.009867] env[68569]: DEBUG oslo_concurrency.lockutils [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.010053] env[68569]: DEBUG nova.network.neutron [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.695882] env[68569]: DEBUG nova.network.neutron [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Updating instance_info_cache with network_info: [{"id": "d2756c2c-59f0-4d39-a132-7170724fdf05", "address": "fa:16:3e:b5:d6:91", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2756c2c-59", "ovs_interfaceid": "d2756c2c-59f0-4d39-a132-7170724fdf05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.198541] env[68569]: DEBUG 
oslo_concurrency.lockutils [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-fad4a057-8e5d-40c9-96ec-815910eb2dcd" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.736020] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.736404] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d055128e-c763-4443-a095-975339ce654c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.744989] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1463.744989] env[68569]: value = "task-3168127" [ 1463.744989] env[68569]: _type = "Task" [ 1463.744989] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.752569] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168127, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.255232] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168127, 'name': PowerOffVM_Task, 'duration_secs': 0.172402} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.255488] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.256257] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9684086-8739-478f-8a10-128cded74c50 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.273703] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f241e6-c19b-479a-b132-d5df21c925fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.299141] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1464.299430] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96f39a61-ef37-4674-8a8b-d099f91b9a69 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.305454] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1464.305454] env[68569]: value = "task-3168128" [ 1464.305454] env[68569]: _type = "Task" [ 1464.305454] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.312592] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168128, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.816437] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1464.816862] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1464.816951] env[68569]: DEBUG oslo_concurrency.lockutils [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.817040] env[68569]: DEBUG oslo_concurrency.lockutils [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1464.817220] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1464.817472] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae3a71e2-e669-45d9-9289-4af90555e37b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.833370] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1464.833558] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Folder [datastore2] devstack-image-cache_base created. 
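The second PowerOffVM_Task in the rescue flow above is reported as "VM already powered off" rather than failing. With oslo.vmware that behaviour is typically obtained by treating the InvalidPowerState fault as success; a hedged sketch of that pattern is below, reusing the session and vm_ref names from the previous sketch (this is not Nova's vm_util code).

    from oslo_vmware import exceptions as vexc

    def ensure_powered_off(session, vm_ref):
        """Power off a VM, tolerating the case where it is already off."""
        try:
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            session.wait_for_task(task)
        except vexc.InvalidPowerStateException:
            # vCenter raises InvalidPowerState for a VM that is already
            # powered off; swallowing it makes the call idempotent.
            pass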
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1464.834307] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3351df-09b2-4ade-9b65-dc61bfbc9ad5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.839136] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1464.839136] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f94cea-7a30-b9ff-9143-6112aac6bb6a" [ 1464.839136] env[68569]: _type = "Task" [ 1464.839136] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.846984] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f94cea-7a30-b9ff-9143-6112aac6bb6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.350135] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52f94cea-7a30-b9ff-9143-6112aac6bb6a, 'name': SearchDatastore_Task, 'duration_secs': 0.011442} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.350909] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3876973c-cfca-4403-bee0-bbdd81e12171 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.355860] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1465.355860] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52554af8-949b-fbe9-b617-ff3285f414b6" [ 1465.355860] env[68569]: _type = "Task" [ 1465.355860] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.364005] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52554af8-949b-fbe9-b617-ff3285f414b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.866058] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52554af8-949b-fbe9-b617-ff3285f414b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009115} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.866380] env[68569]: DEBUG oslo_concurrency.lockutils [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1465.866571] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. {{(pid=68569) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1465.866821] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7e62c89-983c-4b6c-b590-379da110615e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.872885] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1465.872885] env[68569]: value = "task-3168129" [ 1465.872885] env[68569]: _type = "Task" [ 1465.872885] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.879718] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.382528] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43041} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.382832] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. 
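The SearchDatastore/CopyVirtualDisk sequence above runs while holding a lock named after the cached image vmdk, so concurrent rescues do not copy from the image cache at the same time; the Acquiring/Acquired/Releasing lines come from oslo.concurrency. A small sketch of that locking pattern follows, with a placeholder copy step standing in for the CopyVirtualDisk_Task call.

    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore2] devstack-image-cache_base/"
                  "cfcf6154-fe87-45d3-9aaf-2d3604c95629/"
                  "cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk")

    def copy_rescue_disk(src_vmdk):
        # Placeholder for the CopyVirtualDisk_Task step in the log.
        print("copying %s to a per-instance -rescue.vmdk" % src_vmdk)

    # lockutils.lock() produces the Acquiring/Acquired/Releasing debug lines
    # seen above and serializes access to the shared image-cache disk.
    with lockutils.lock(CACHE_VMDK):
        copy_rescue_disk(CACHE_VMDK)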
[ 1466.383594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c92b11-1b9d-4c49-a68b-3e7375a57db4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.407500] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.407721] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81e7cda5-bfd2-4b3f-9f0b-fdf57b5689d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.424807] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1466.424807] env[68569]: value = "task-3168130" [ 1466.424807] env[68569]: _type = "Task" [ 1466.424807] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.432110] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168130, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.934470] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168130, 'name': ReconfigVM_Task, 'duration_secs': 0.255446} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.934843] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Reconfigured VM instance instance-0000007e to attach disk [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1466.935563] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b41449-a9e8-4506-847a-b5d7dc346cb4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.959474] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fda9d296-c648-4238-8ee9-784637fa10a1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.973615] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1466.973615] env[68569]: value = "task-3168131" [ 1466.973615] env[68569]: _type = "Task" [ 1466.973615] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.981228] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168131, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.483053] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168131, 'name': ReconfigVM_Task, 'duration_secs': 0.140104} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.483391] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1467.483643] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ea51614-044a-4d73-846c-14213befb719 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.489368] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1467.489368] env[68569]: value = "task-3168132" [ 1467.489368] env[68569]: _type = "Task" [ 1467.489368] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.496379] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168132, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.000646] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168132, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.501231] env[68569]: DEBUG oslo_vmware.api [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168132, 'name': PowerOnVM_Task, 'duration_secs': 0.556638} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.501503] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1468.504297] env[68569]: DEBUG nova.compute.manager [None req-af83df83-2f36-44a4-a8f4-fa97b2f6dfad tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1468.505040] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef03244-2798-4ae0-ba8c-85a8c07aaeea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.315029] env[68569]: INFO nova.compute.manager [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Rescuing [ 1471.315029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.315029] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1471.315029] env[68569]: DEBUG nova.network.neutron [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: 
e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.989999] env[68569]: DEBUG nova.network.neutron [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updating instance_info_cache with network_info: [{"id": "20033b81-8427-4557-b3d1-160c08d94511", "address": "fa:16:3e:19:af:7c", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20033b81-84", "ovs_interfaceid": "20033b81-8427-4557-b3d1-160c08d94511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.493286] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1474.028050] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1474.028050] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-366cc084-8753-4483-ab89-b9c6b21814a6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.035199] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1474.035199] env[68569]: value = "task-3168133" [ 1474.035199] env[68569]: _type = "Task" [ 1474.035199] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.043311] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168133, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.546053] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168133, 'name': PowerOffVM_Task, 'duration_secs': 0.197884} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.546053] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1474.546316] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8a81ff-4e19-48f1-a2d0-5e5b44f6008c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.564118] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c905ad23-8106-4d0b-ac15-151476f6da42 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.589721] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1474.589982] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0c48715-ae56-48f2-871d-66be02bd2ec4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.596023] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1474.596023] env[68569]: value = "task-3168134" [ 1474.596023] env[68569]: _type = "Task" [ 1474.596023] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.604257] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168134, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.106713] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1475.107263] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1475.107263] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.107396] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1475.107537] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1475.107758] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9f07db9-de1a-4575-a433-fcec3dbd5568 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.116091] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1475.116297] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1475.116981] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-801fe558-b4f3-43c6-897c-cf7877e4c17a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.121947] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1475.121947] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b0dda-19bc-7fc1-73bb-97a1e16a421d" [ 1475.121947] env[68569]: _type = "Task" [ 1475.121947] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.129490] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b0dda-19bc-7fc1-73bb-97a1e16a421d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.633065] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]527b0dda-19bc-7fc1-73bb-97a1e16a421d, 'name': SearchDatastore_Task, 'duration_secs': 0.007993} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.633341] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efecdeaf-54d0-4bcf-93df-be0883985381 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.638306] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1475.638306] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524711f9-36ab-0abf-a81a-c53e94f6170d" [ 1475.638306] env[68569]: _type = "Task" [ 1475.638306] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.645681] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524711f9-36ab-0abf-a81a-c53e94f6170d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.148778] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524711f9-36ab-0abf-a81a-c53e94f6170d, 'name': SearchDatastore_Task, 'duration_secs': 0.008986} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.149173] env[68569]: DEBUG oslo_concurrency.lockutils [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1476.149305] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. {{(pid=68569) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1476.149562] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2a21e54-11df-479b-9889-0b9126cd1919 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.156592] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1476.156592] env[68569]: value = "task-3168135" [ 1476.156592] env[68569]: _type = "Task" [ 1476.156592] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.163704] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.666331] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443539} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.666583] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. 
[ 1476.667361] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e1aeba-2d0a-4c31-a34e-3fb15bacd537 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.691385] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1476.691586] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b31cd3d1-dea3-407d-a44d-ac570eadb4f5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.708076] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1476.708076] env[68569]: value = "task-3168136" [ 1476.708076] env[68569]: _type = "Task" [ 1476.708076] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.715360] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168136, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.217739] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168136, 'name': ReconfigVM_Task, 'duration_secs': 0.29517} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.218176] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Reconfigured VM instance instance-0000007d to attach disk [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1477.218881] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd67a70e-a9b4-48a5-ab01-379154941a29 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.242788] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f760fb2-7b89-4731-bb33-947ffb3fa441 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.257056] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1477.257056] env[68569]: value = "task-3168137" [ 1477.257056] env[68569]: _type = "Task" [ 1477.257056] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.265461] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168137, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.766774] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168137, 'name': ReconfigVM_Task, 'duration_secs': 0.142217} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.767023] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1477.767276] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf191dd4-79b7-4deb-9e22-15d4327905a9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.773656] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1477.773656] env[68569]: value = "task-3168138" [ 1477.773656] env[68569]: _type = "Task" [ 1477.773656] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.780930] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.283447] env[68569]: DEBUG oslo_vmware.api [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168138, 'name': PowerOnVM_Task, 'duration_secs': 0.360051} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.283842] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1478.286226] env[68569]: DEBUG nova.compute.manager [None req-5cf710e4-aca9-4478-8795-8ad2b5c1f4ac tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1478.287042] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b5a1c4-fbd0-4475-a340-b79711506494 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.251348] env[68569]: INFO nova.compute.manager [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Unrescuing [ 1479.251642] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.251720] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1479.251929] env[68569]: DEBUG nova.network.neutron [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1479.972210] env[68569]: DEBUG nova.network.neutron [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: 
e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updating instance_info_cache with network_info: [{"id": "20033b81-8427-4557-b3d1-160c08d94511", "address": "fa:16:3e:19:af:7c", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20033b81-84", "ovs_interfaceid": "20033b81-8427-4557-b3d1-160c08d94511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.474916] env[68569]: DEBUG oslo_concurrency.lockutils [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1480.475630] env[68569]: DEBUG nova.objects.instance [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'flavor' on Instance uuid e8ba2946-f1d1-4b1f-b915-ba80ac5f087f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1480.982157] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1825355-737b-432e-9649-30ab4d51d945 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.002736] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1481.003040] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b6f6964-877f-4a0d-8852-a37d0203c511 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.010160] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1481.010160] env[68569]: value = "task-3168139" [ 1481.010160] env[68569]: _type = "Task" [ 1481.010160] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.018585] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168139, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.520064] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168139, 'name': PowerOffVM_Task, 'duration_secs': 0.249636} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.520342] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1481.525534] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1481.525793] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b27c5d26-aae1-48d0-b6b4-91523ae72c4f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.543347] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1481.543347] env[68569]: value = "task-3168140" [ 1481.543347] env[68569]: _type = "Task" [ 1481.543347] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.550647] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168140, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.053352] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168140, 'name': ReconfigVM_Task, 'duration_secs': 0.224563} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.054044] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1482.054044] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1482.054044] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0d04369-a8dd-48cf-a3a8-df10f207406b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.082814] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1482.082814] env[68569]: value = "task-3168141" [ 1482.082814] env[68569]: _type = "Task" [ 1482.082814] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.082814] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.570371] env[68569]: DEBUG oslo_vmware.api [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168141, 'name': PowerOnVM_Task, 'duration_secs': 0.350941} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.570638] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1482.570870] env[68569]: DEBUG nova.compute.manager [None req-bd137291-2d59-4818-b9e3-990a46ec07c5 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1482.571647] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b68e82-6451-4b89-b248-e4d24270ae16 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.927516] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.927769] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1486.430129] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Starting instance... 
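The "Acquiring lock ... acquired ... waited 0.000s" / "released ... held" bookends throughout this log come from oslo.concurrency's lockutils helpers, which Nova uses to serialize work on a given instance UUID or on shared state such as compute_resources. A minimal sketch of that usage (lock names copied from the log; the function itself is illustrative, not Nova's code):

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = "c309956b-1da2-4c75-a23e-2c34286a4849"

@lockutils.synchronized(INSTANCE_UUID)
def locked_build(instance):
    # Only one worker builds this instance at a time; anyone else blocks,
    # which is what the "waited N.NNNs" figures in the log measure.
    ...

# The same helper also works as a context manager for shared state:
with lockutils.lock("compute_resources"):
    pass  # claim or audit resources while holding the lock
```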
{{(pid=68569) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1486.954486] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1486.954795] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1486.956319] env[68569]: INFO nova.compute.claims [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1488.010977] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae1c433-6f7c-4150-b8c8-681f2628e42b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.018179] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ea5d52-d67f-4fbe-b651-48ad85264234 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.047463] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd520bf5-aaad-4897-a0d2-24983f7850fd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.054324] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc6ec06-3aab-4955-8839-335835154bed {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.066801] env[68569]: DEBUG nova.compute.provider_tree [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.570367] env[68569]: DEBUG nova.scheduler.client.report [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.076567] 
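The inventory payload reported for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 above fixes the capacity the scheduler can place against; Placement's usual rule is capacity = (total - reserved) * allocation_ratio. A quick arithmetic check against the figures in the log (values copied from the record; the formula is the standard Placement one, stated here as an assumption):

```python
# Schedulable capacity implied by the reported inventory (arithmetic only).
vcpu      = (48     - 0)   * 4.0   # 192.0 schedulable vCPUs
memory_mb = (196590 - 512) * 1.0   # 196078.0 MB of schedulable RAM
disk_gb   = (400    - 0)   * 1.0   # 400.0 GB of schedulable disk
print(vcpu, memory_mb, disk_gb)
```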
env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.122s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.077099] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Start building networks asynchronously for instance. {{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1489.583056] env[68569]: DEBUG nova.compute.utils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1489.584281] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Allocating IP information in the background. {{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1489.584453] env[68569]: DEBUG nova.network.neutron [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] allocate_for_instance() {{(pid=68569) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1489.630342] env[68569]: DEBUG nova.policy [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550de9c12d9d45349678d30dd3e61568', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e99ea1e4c887441d9fe1204996bff8fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68569) authorize /opt/stack/nova/nova/policy.py:192}} [ 1489.900972] env[68569]: DEBUG nova.network.neutron [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Successfully created port: 723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.088282] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Start building block device mappings for instance. 
{{(pid=68569) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1491.098652] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Start spawning the instance on the hypervisor. {{(pid=68569) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1491.126401] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-26T04:48:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-26T04:48:24Z,direct_url=,disk_format='vmdk',id=cfcf6154-fe87-45d3-9aaf-2d3604c95629,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f32ccefd074040788fb3a7a857f48173',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-26T04:48:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1491.126655] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Flavor limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.126809] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Image limits 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1491.126986] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Flavor pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.127140] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Image pref 0:0:0 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1491.127287] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68569) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1491.127545] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1491.127709] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1491.127865] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Got 1 possible topologies {{(pid=68569) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1491.128031] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1491.128217] env[68569]: DEBUG nova.virt.hardware [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68569) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1491.129116] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e775be2-1ce6-4672-a138-35486a690fd5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.136983] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd808d71-5065-4d0a-8db7-0a5086b645e7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.278199] env[68569]: DEBUG nova.compute.manager [req-4962da37-8d20-47a2-ab9a-e9db920df746 req-2b544c4a-84b7-4e23-816f-897195f738fd service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received event network-vif-plugged-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1491.278433] env[68569]: DEBUG oslo_concurrency.lockutils [req-4962da37-8d20-47a2-ab9a-e9db920df746 req-2b544c4a-84b7-4e23-816f-897195f738fd service nova] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1491.278640] env[68569]: DEBUG oslo_concurrency.lockutils [req-4962da37-8d20-47a2-ab9a-e9db920df746 req-2b544c4a-84b7-4e23-816f-897195f738fd service nova] Lock "c309956b-1da2-4c75-a23e-2c34286a4849-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1491.278795] env[68569]: DEBUG oslo_concurrency.lockutils [req-4962da37-8d20-47a2-ab9a-e9db920df746 req-2b544c4a-84b7-4e23-816f-897195f738fd service nova] Lock "c309956b-1da2-4c75-a23e-2c34286a4849-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68569) inner 
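The hardware.py lines above enumerate (sockets, cores, threads) combinations whose product equals the flavor's vCPU count, capped by the 65536 limits; for the 1-vCPU m1.nano flavor the only candidate is 1:1:1, hence "Got 1 possible topologies". A rough, self-contained sketch of that enumeration (not Nova's actual implementation):

```python
# Illustrative re-implementation of the topology search described in the log.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
```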
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1491.278956] env[68569]: DEBUG nova.compute.manager [req-4962da37-8d20-47a2-ab9a-e9db920df746 req-2b544c4a-84b7-4e23-816f-897195f738fd service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] No waiting events found dispatching network-vif-plugged-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1491.279127] env[68569]: WARNING nova.compute.manager [req-4962da37-8d20-47a2-ab9a-e9db920df746 req-2b544c4a-84b7-4e23-816f-897195f738fd service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received unexpected event network-vif-plugged-723a385f-adb7-495c-b42d-e888e80473ad for instance with vm_state building and task_state spawning. [ 1491.813343] env[68569]: DEBUG nova.network.neutron [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Successfully updated port: 723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.850594] env[68569]: DEBUG nova.compute.manager [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received event network-changed-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1491.850791] env[68569]: DEBUG nova.compute.manager [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing instance network info cache due to event network-changed-723a385f-adb7-495c-b42d-e888e80473ad. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1491.850988] env[68569]: DEBUG oslo_concurrency.lockutils [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.851141] env[68569]: DEBUG oslo_concurrency.lockutils [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1491.851309] env[68569]: DEBUG nova.network.neutron [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.316367] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.383873] env[68569]: DEBUG nova.network.neutron [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Instance cache missing network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1492.448067] env[68569]: DEBUG nova.network.neutron [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.951149] env[68569]: DEBUG oslo_concurrency.lockutils [req-85057b6f-2356-4b04-b170-caf733a1cbe9 req-f54f9890-82b3-4220-993b-171ff8cb1730 service nova] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1492.951531] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1492.951694] env[68569]: DEBUG nova.network.neutron [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1493.480725] env[68569]: DEBUG nova.network.neutron [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Instance cache missing 
network info. {{(pid=68569) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1493.591201] env[68569]: DEBUG nova.network.neutron [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.093666] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1494.093988] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Instance network_info: |[{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68569) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1494.094431] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:7d:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '723a385f-adb7-495c-b42d-e888e80473ad', 'vif_model': 'vmxnet3'}] {{(pid=68569) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1494.101768] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1494.101966] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Creating VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1494.102221] env[68569]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12c9d514-1813-46f6-a148-0c6687512d34 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.122532] env[68569]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1494.122532] env[68569]: value = "task-3168142" [ 1494.122532] env[68569]: _type = "Task" [ 1494.122532] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.129843] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168142, 'name': CreateVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.633047] env[68569]: DEBUG oslo_vmware.api [-] Task: {'id': task-3168142, 'name': CreateVM_Task, 'duration_secs': 0.307108} completed successfully. 
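The "Instance VIF info" record above is derived from the network_info cache entry dumped just before it: the port id, MAC address and NSX logical-switch id are all taken from that JSON. A small sketch extracting the same fields from such an entry (values copied from the log; the helper is hypothetical):

```python
# Illustrative: pick out the fields the "Instance VIF info" record is built from.
vif = {
    "id": "723a385f-adb7-495c-b42d-e888e80473ad",
    "address": "fa:16:3e:f6:7d:21",
    "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae",
                "subnets": [{"ips": [{"address": "192.168.128.5"}]}]},
    "details": {"nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8"},
    "devname": "tap723a385f-ad",
}

def summarize(vif):
    ips = [ip["address"] for subnet in vif["network"]["subnets"] for ip in subnet["ips"]]
    return {"port_id": vif["id"], "mac": vif["address"], "fixed_ips": ips,
            "nsx_switch": vif["details"]["nsx-logical-switch-id"], "devname": vif["devname"]}

print(summarize(vif))
```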
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.633504] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Created VM on the ESX host {{(pid=68569) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.633856] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.634040] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1494.634361] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1494.634611] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f32e789-e3e9-48c8-8ff3-f1c2c9fdab60 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.638906] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1494.638906] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ee7bc1-a717-05d3-bb0f-2870030d94f1" [ 1494.638906] env[68569]: _type = "Task" [ 1494.638906] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.648478] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ee7bc1-a717-05d3-bb0f-2870030d94f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.148610] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52ee7bc1-a717-05d3-bb0f-2870030d94f1, 'name': SearchDatastore_Task, 'duration_secs': 0.010481} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.148915] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1495.149163] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1495.149398] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.149544] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1495.149721] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1495.149975] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9557708-a9fc-41ba-b72e-a44ee8fdcaf8 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.157697] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1495.157863] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1495.158529] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9549ea08-ab3f-46e2-b68d-4a32d07e812e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.163277] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1495.163277] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c5bee-9ac5-ef44-8734-0b4f58f574e1" [ 1495.163277] env[68569]: _type = "Task" [ 1495.163277] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.170550] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c5bee-9ac5-ef44-8734-0b4f58f574e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.673282] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]524c5bee-9ac5-ef44-8734-0b4f58f574e1, 'name': SearchDatastore_Task, 'duration_secs': 0.008182} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.673981] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc414384-edaa-4fbf-93ac-8c79ce263cdd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.678648] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1495.678648] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52135b02-3154-0c47-695c-c446f85a75b7" [ 1495.678648] env[68569]: _type = "Task" [ 1495.678648] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.685704] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52135b02-3154-0c47-695c-c446f85a75b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.189576] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52135b02-3154-0c47-695c-c446f85a75b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010097} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.189838] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1496.190102] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/c309956b-1da2-4c75-a23e-2c34286a4849.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1496.190358] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c35b4f5a-4912-4ceb-8dc6-8d5825e41e0d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.197071] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1496.197071] env[68569]: value = "task-3168143" [ 1496.197071] env[68569]: _type = "Task" [ 1496.197071] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.204407] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.707162] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460001} completed successfully. 
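The SearchDatastore_Task / CopyVirtualDisk_Task sequence above is the driver's image-cache path: the cached cfcf6154... VMDK is located under devstack-image-cache_base and then copied into the instance's own datastore folder. A sketch of the copy step using the same invoke-then-poll pattern as earlier (datastore paths copied from the log; `session` and the datacenter moref `dc_ref` are assumed to exist already, and the exact call shape is an approximation rather than Nova's code):

```python
# Illustrative: copy the cached image VMDK to the instance directory and wait.
disk_mgr = session.vim.service_content.virtualDiskManager
src = ("[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/"
       "cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk")
dest = ("[datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/"
        "c309956b-1da2-4c75-a23e-2c34286a4849.vmdk")
task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", disk_mgr,
                          sourceName=src, sourceDatacenter=dc_ref, destName=dest)
session.wait_for_task(task)
```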
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.707923] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/c309956b-1da2-4c75-a23e-2c34286a4849.vmdk {{(pid=68569) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1496.707923] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Extending root virtual disk to 1048576 {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1496.707923] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f365a15-f050-4c28-908a-52dd77de7c48 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.718172] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1496.718172] env[68569]: value = "task-3168144" [ 1496.718172] env[68569]: _type = "Task" [ 1496.718172] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.726614] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168144, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.227976] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060721} completed successfully. 
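The figure in "Extending root virtual disk to 1048576" is the target size in KB: the m1.nano flavor used here has root_gb=1, and 1 GB expressed in KB is 1048576. The arithmetic, for reference (unit interpretation inferred from the flavor size, not stated in the log itself):

```python
# 1 GiB root disk expressed in KiB, matching "Extending root virtual disk to 1048576".
root_gb = 1
size_kb = root_gb * 1024 * 1024
print(size_kb)  # 1048576
```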
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.228261] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Extended root virtual disk {{(pid=68569) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1497.229009] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1514301-a932-4dd5-9fe1-d43340858076 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.252383] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/c309956b-1da2-4c75-a23e-2c34286a4849.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.252692] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2b5470d-ffc0-4d05-a63e-8bfe2a5ca56f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.281889] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1497.281889] env[68569]: value = "task-3168145" [ 1497.281889] env[68569]: _type = "Task" [ 1497.281889] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.291990] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.790889] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168145, 'name': ReconfigVM_Task, 'duration_secs': 0.438015} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.791284] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfigured VM instance instance-0000007f to attach disk [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/c309956b-1da2-4c75-a23e-2c34286a4849.vmdk or device None with type sparse {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1497.791761] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f76d19a-22c4-4914-af7d-10932b7cb7c3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.797564] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1497.797564] env[68569]: value = "task-3168146" [ 1497.797564] env[68569]: _type = "Task" [ 1497.797564] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.805556] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168146, 'name': Rename_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.306982] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168146, 'name': Rename_Task, 'duration_secs': 0.135504} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.307271] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1498.307513] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c18af616-e429-48f4-a386-5ef2f9f9c9b3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.313795] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1498.313795] env[68569]: value = "task-3168147" [ 1498.313795] env[68569]: _type = "Task" [ 1498.313795] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.320796] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168147, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.822730] env[68569]: DEBUG oslo_vmware.api [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168147, 'name': PowerOnVM_Task, 'duration_secs': 0.48274} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.823112] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1498.823221] env[68569]: INFO nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1498.823418] env[68569]: DEBUG nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1498.824183] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0411532-56f9-4ff4-a97f-bb22e12f6439 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.342395] env[68569]: INFO nova.compute.manager [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Took 12.41 seconds to build instance. [ 1499.844685] env[68569]: DEBUG oslo_concurrency.lockutils [None req-2dc2359b-240b-4903-aa80-69cb88a1006f tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.917s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.968015] env[68569]: DEBUG nova.compute.manager [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received event network-changed-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1499.968257] env[68569]: DEBUG nova.compute.manager [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing instance network info cache due to event network-changed-723a385f-adb7-495c-b42d-e888e80473ad. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1499.968961] env[68569]: DEBUG oslo_concurrency.lockutils [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.969212] env[68569]: DEBUG oslo_concurrency.lockutils [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1499.969351] env[68569]: DEBUG nova.network.neutron [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1500.672518] env[68569]: DEBUG nova.network.neutron [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updated VIF entry in instance network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.672888] env[68569]: DEBUG nova.network.neutron [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.175724] env[68569]: DEBUG oslo_concurrency.lockutils [req-473ce6d8-c3e8-46d0-bd39-3b1b90b95a6f req-07289d6b-73f5-4f26-a4f1-dd219449b55f service nova] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1505.712577] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running 
periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.712994] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.218450] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.218667] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.218842] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.219009] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.219161] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.219309] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.219446] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
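The "Running periodic task ComputeManager._*" entries above are driven by oslo.service's periodic task runner. A minimal sketch of that mechanism, assuming oslo.service's documented periodic_task API; the Worker class, task name, and 60-second spacing are illustrative, not Nova's actual ComputeManager code:

from oslo_config import cfg
from oslo_service import periodic_task


class Worker(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task(spacing=60)
    def _poll_something(self, context):
        # each run is logged as a "Running periodic task ..." DEBUG line
        pass


worker = Worker()
worker.run_periodic_tasks(context=None)  # normally invoked from the service's timer loop

Note that _reclaim_queued_deletes above does run as a periodic task; the "CONF.reclaim_instance_interval <= 0, skipping..." message is Nova's own check inside the task body, not the runner skipping it.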
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1506.219586] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.723115] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.723396] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.723511] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.723728] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1506.724631] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa83d13-b89b-4a8f-8c6b-65c63ace7550 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.733017] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f56783c-ba31-4e98-98dd-e99ca91dff2e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.747501] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20811f3-03ca-4e80-b6b2-727163cbe32d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.754210] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5232e9dc-4a43-4aac-9b0b-b349ae56e5c9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.782187] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180787MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1506.782336] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1506.782582] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.807097] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance e8ba2946-f1d1-4b1f-b915-ba80ac5f087f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1507.807333] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fad4a057-8e5d-40c9-96ec-815910eb2dcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1507.807417] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c309956b-1da2-4c75-a23e-2c34286a4849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1507.807550] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1507.807684] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1507.855768] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a801b9a7-5c37-4531-a4e7-9137992b0aa0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.863087] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6bbcaa-1826-4623-be1b-a9eff72a2f02 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.893783] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e48165-a553-4e8d-8b8c-dbed92f19b9a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.900546] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7166f2-c06a-4a9e-a8e7-2709ec3d449f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.913178] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.416268] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1508.921526] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1508.921884] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.139s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1537.162026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1537.162026] env[68569]: DEBUG oslo_concurrency.lockutils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1537.665295] env[68569]: DEBUG nova.compute.utils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Using /dev/sd instead of None {{(pid=68569) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1538.169168] env[68569]: DEBUG oslo_concurrency.lockutils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1539.233577] env[68569]: DEBUG oslo_concurrency.lockutils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1539.233945] env[68569]: DEBUG oslo_concurrency.lockutils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1539.234095] env[68569]: INFO nova.compute.manager [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Attaching volume 41b8509b-279e-4d25-8a21-56b9db8be1ad to /dev/sdb [ 1539.263594] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eece0bf7-34d6-48e4-bb0c-0e8233270158 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.270426] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f6143a-ec95-49bf-9cb0-531bf4135d85 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.284242] env[68569]: DEBUG nova.virt.block_device [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating existing volume attachment record: e97b7a6c-71b4-48e5-b599-ae84fb600566 {{(pid=68569) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1543.825756] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Volume attach. 
Driver type: vmdk {{(pid=68569) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1543.825999] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633781', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'name': 'volume-41b8509b-279e-4d25-8a21-56b9db8be1ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c309956b-1da2-4c75-a23e-2c34286a4849', 'attached_at': '', 'detached_at': '', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'serial': '41b8509b-279e-4d25-8a21-56b9db8be1ad'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1543.826891] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0f7b23-ecb2-4684-b592-6ff7bd06d6f3 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.843303] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af51d67-00e3-4147-9469-8bfc1e56e5b6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.867408] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] volume-41b8509b-279e-4d25-8a21-56b9db8be1ad/volume-41b8509b-279e-4d25-8a21-56b9db8be1ad.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1543.867408] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f049e12c-98b0-4e74-a11b-916d6e61db30 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.884230] env[68569]: DEBUG oslo_vmware.api [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1543.884230] env[68569]: value = "task-3168152" [ 1543.884230] env[68569]: _type = "Task" [ 1543.884230] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.891322] env[68569]: DEBUG oslo_vmware.api [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168152, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.394250] env[68569]: DEBUG oslo_vmware.api [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168152, 'name': ReconfigVM_Task, 'duration_secs': 0.303667} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.394540] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfigured VM instance instance-0000007f to attach disk [datastore1] volume-41b8509b-279e-4d25-8a21-56b9db8be1ad/volume-41b8509b-279e-4d25-8a21-56b9db8be1ad.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1544.399128] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3aac99b-e426-46d9-a744-cfed47e81782 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.413028] env[68569]: DEBUG oslo_vmware.api [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1544.413028] env[68569]: value = "task-3168153" [ 1544.413028] env[68569]: _type = "Task" [ 1544.413028] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.420121] env[68569]: DEBUG oslo_vmware.api [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168153, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.922723] env[68569]: DEBUG oslo_vmware.api [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168153, 'name': ReconfigVM_Task, 'duration_secs': 0.124871} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.923160] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633781', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'name': 'volume-41b8509b-279e-4d25-8a21-56b9db8be1ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c309956b-1da2-4c75-a23e-2c34286a4849', 'attached_at': '', 'detached_at': '', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'serial': '41b8509b-279e-4d25-8a21-56b9db8be1ad'} {{(pid=68569) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1545.959294] env[68569]: DEBUG nova.objects.instance [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'flavor' on Instance uuid c309956b-1da2-4c75-a23e-2c34286a4849 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1546.465674] env[68569]: DEBUG oslo_concurrency.lockutils [None req-238890db-0016-448a-bb8d-e680dbaa0a90 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1546.603614] env[68569]: INFO nova.compute.manager [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Rescuing [ 1546.603970] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.604194] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1546.604422] env[68569]: DEBUG nova.network.neutron [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1547.349560] env[68569]: DEBUG nova.network.neutron [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with 
network_info: [{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.852402] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1549.389769] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1549.390160] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e6f5b58-30d6-4afa-bd9a-dbbfdd917a50 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.398183] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1549.398183] env[68569]: value = "task-3168154" [ 1549.398183] env[68569]: _type = "Task" [ 1549.398183] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.406445] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168154, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.908184] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168154, 'name': PowerOffVM_Task, 'duration_secs': 0.177291} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.908436] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1549.909223] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fd1598-355e-4341-b659-60d8c1d7409d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.929529] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd604420-2fc3-4620-bfc8-5386756116cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.954470] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1549.954714] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46226af4-3623-4045-8379-3c4d08c4ab2b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.960701] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1549.960701] env[68569]: value = "task-3168155" [ 1549.960701] env[68569]: _type = "Task" [ 1549.960701] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.967565] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168155, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.471599] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] VM already powered off {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1550.472042] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Processing image cfcf6154-fe87-45d3-9aaf-2d3604c95629 {{(pid=68569) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1550.473699] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.473699] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1550.473699] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1550.473699] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f6b77db-c0ca-48a4-a900-d6b014e59f79 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.481141] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68569) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1550.481315] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68569) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1550.482042] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d69eebf-09d4-4f42-ae67-2c4930d42404 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.486740] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1550.486740] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5229f732-972d-023a-88c5-c5480d0bc278" [ 1550.486740] env[68569]: _type = "Task" [ 1550.486740] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.494920] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5229f732-972d-023a-88c5-c5480d0bc278, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.997160] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]5229f732-972d-023a-88c5-c5480d0bc278, 'name': SearchDatastore_Task, 'duration_secs': 0.008672} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.997860] env[68569]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3600a34-e4bc-4186-ad80-7c0483cd60f4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.002619] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1551.002619] env[68569]: value = "session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0558e-d546-2797-c444-3450d72d7cfe" [ 1551.002619] env[68569]: _type = "Task" [ 1551.002619] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.009771] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0558e-d546-2797-c444-3450d72d7cfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.512619] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': session[52efbbdc-96a3-1099-4a72-21ad01a40a90]52a0558e-d546-2797-c444-3450d72d7cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.017916} completed successfully. 
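The 'Acquiring lock "[datastore1] devstack-image-cache_base/..."' / "Acquired lock" / "Releasing lock" entries around this datastore search come from oslo.concurrency's lock context manager. A minimal sketch of that locking pattern (the function name and body are illustrative, not Nova's image-cache code):

from oslo_concurrency import lockutils


def ensure_cached_image(cache_vmdk_path):
    # One lock per cached-VMDK path serializes concurrent spawns/rescues that
    # need the same base image; lockutils logs the Acquiring/Acquired/Releasing
    # lines at DEBUG as the context manager is entered and exited.
    with lockutils.lock(cache_vmdk_path):
        pass  # the SearchDatastore_Task / CopyVirtualDisk_Task work happens here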
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.512964] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1551.513163] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. {{(pid=68569) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1551.513426] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fce6aac-a226-4ef3-baae-e0207a887394 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.520797] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1551.520797] env[68569]: value = "task-3168156" [ 1551.520797] env[68569]: _type = "Task" [ 1551.520797] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.527964] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168156, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.030832] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168156, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.416303} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.031103] env[68569]: INFO nova.virt.vmwareapi.ds_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cfcf6154-fe87-45d3-9aaf-2d3604c95629/cfcf6154-fe87-45d3-9aaf-2d3604c95629.vmdk to [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk. 
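Each vCenter operation in this section (PowerOffVM_Task, CopyVirtualDisk_Task, and the ReconfigVM_Task calls that follow) uses the same invoke-then-poll shape: the SOAP call returns a Task reference immediately, and oslo.vmware polls it, which is what produces the "Waiting for the task" and "progress is N%" lines. A minimal sketch, assuming an existing oslo_vmware.api.VMwareAPISession; vm_ref and config_spec stand in for the managed-object reference and VirtualMachineConfigSpec that Nova builds elsewhere:

def reconfigure_vm(session, vm_ref, config_spec):
    # ReconfigVM_Task returns a Task managed-object reference right away;
    # the disk attach itself runs asynchronously on the vCenter side.
    task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
    # wait_for_task() polls the task (the periodic "progress is N%" DEBUG
    # lines above) and raises if vCenter reports that the task failed.
    return session.wait_for_task(task_ref)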
[ 1552.031865] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d01186-960a-4824-a3fb-6108112d160e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.058108] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1552.058359] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84a31b2d-1642-497c-bf65-ae0c553c828a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.076016] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1552.076016] env[68569]: value = "task-3168157" [ 1552.076016] env[68569]: _type = "Task" [ 1552.076016] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.083035] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168157, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.586087] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168157, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.086144] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168157, 'name': ReconfigVM_Task, 'duration_secs': 0.546173} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.086424] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfigured VM instance instance-0000007f to attach disk [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849/cfcf6154-fe87-45d3-9aaf-2d3604c95629-rescue.vmdk or device None with type thin {{(pid=68569) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1553.087260] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c03825-fbcd-409c-9cad-f26c11926c14 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.113651] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03586d62-7f22-4b9a-8862-dd8b0f8fb468 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.128757] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1553.128757] env[68569]: value = "task-3168158" [ 1553.128757] env[68569]: _type = "Task" [ 1553.128757] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.137234] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168158, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.638373] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168158, 'name': ReconfigVM_Task, 'duration_secs': 0.160689} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.638746] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1553.638840] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-260b1e25-85a7-499e-85a2-3d96c6ab336b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.644970] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1553.644970] env[68569]: value = "task-3168159" [ 1553.644970] env[68569]: _type = "Task" [ 1553.644970] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.651942] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168159, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.155187] env[68569]: DEBUG oslo_vmware.api [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168159, 'name': PowerOnVM_Task, 'duration_secs': 0.365689} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.155443] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1554.158275] env[68569]: DEBUG nova.compute.manager [None req-f0fabd70-842f-45d3-8150-feaf2f2b4b4d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1554.159013] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a526eead-f1b1-404b-93c5-95448fa38136 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.721913] env[68569]: INFO nova.compute.manager [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Unrescuing [ 1555.722282] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.722331] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1555.722480] env[68569]: DEBUG nova.network.neutron [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Building network info cache for instance {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1556.434163] env[68569]: DEBUG nova.network.neutron [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: 
c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.937176] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1556.937829] env[68569]: DEBUG nova.objects.instance [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'flavor' on Instance uuid c309956b-1da2-4c75-a23e-2c34286a4849 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.443559] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f751342-4499-4277-832e-eea783407fc2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.468218] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1557.468578] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f98989dd-bf02-4481-88c1-c02fce0532a2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.475135] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1557.475135] env[68569]: value = "task-3168160" [ 1557.475135] env[68569]: _type = "Task" [ 1557.475135] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.483339] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168160, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.985100] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168160, 'name': PowerOffVM_Task, 'duration_secs': 0.209323} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.985483] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1557.990567] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfiguring VM instance instance-0000007f to detach disk 2002 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1557.990818] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac5b508a-9957-48f2-9a65-38233ef3c4df {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.008314] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1558.008314] env[68569]: value = "task-3168161" [ 1558.008314] env[68569]: _type = "Task" [ 1558.008314] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.015598] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168161, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.517861] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168161, 'name': ReconfigVM_Task, 'duration_secs': 0.377862} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.518083] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfigured VM instance instance-0000007f to detach disk 2002 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1558.518270] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1558.518505] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cce32419-9426-4d41-9c89-4f13b6e3dbe4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.525601] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1558.525601] env[68569]: value = "task-3168162" [ 1558.525601] env[68569]: _type = "Task" [ 1558.525601] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.538588] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.035567] env[68569]: DEBUG oslo_vmware.api [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168162, 'name': PowerOnVM_Task, 'duration_secs': 0.361262} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.035939] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powered on the VM {{(pid=68569) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1559.036056] env[68569]: DEBUG nova.compute.manager [None req-f111b369-af02-48cf-82d1-eea67fcdbc13 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Checking state {{(pid=68569) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1559.036854] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ce2686-b401-4ba9-93c0-910fdc9408d9 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.163073] env[68569]: DEBUG nova.compute.manager [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received event network-changed-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1560.163507] env[68569]: DEBUG nova.compute.manager [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing instance network info cache due to event network-changed-723a385f-adb7-495c-b42d-e888e80473ad. {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1560.163507] env[68569]: DEBUG oslo_concurrency.lockutils [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.163652] env[68569]: DEBUG oslo_concurrency.lockutils [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1560.163761] env[68569]: DEBUG nova.network.neutron [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1560.852972] env[68569]: DEBUG nova.network.neutron [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updated VIF entry in instance network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad. 
{{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1560.853340] env[68569]: DEBUG nova.network.neutron [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.356239] env[68569]: DEBUG oslo_concurrency.lockutils [req-176bbbda-31fa-4faf-9a85-5556989b5408 req-e619ddaf-ee73-4620-b993-abfb814921b1 service nova] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1562.187444] env[68569]: DEBUG nova.compute.manager [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received event network-changed-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1562.187615] env[68569]: DEBUG nova.compute.manager [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing instance network info cache due to event network-changed-723a385f-adb7-495c-b42d-e888e80473ad. 
{{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11749}} [ 1562.187824] env[68569]: DEBUG oslo_concurrency.lockutils [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] Acquiring lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.187963] env[68569]: DEBUG oslo_concurrency.lockutils [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] Acquired lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1562.188136] env[68569]: DEBUG nova.network.neutron [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Refreshing network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.869962] env[68569]: DEBUG nova.network.neutron [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updated VIF entry in instance network info cache for port 723a385f-adb7-495c-b42d-e888e80473ad. {{(pid=68569) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.870330] env[68569]: DEBUG nova.network.neutron [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [{"id": "723a385f-adb7-495c-b42d-e888e80473ad", "address": "fa:16:3e:f6:7d:21", "network": {"id": "e7cd7444-9c1e-4957-9f13-0a1d4e5999ae", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2094380537-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e99ea1e4c887441d9fe1204996bff8fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723a385f-ad", "ovs_interfaceid": "723a385f-adb7-495c-b42d-e888e80473ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.373366] env[68569]: DEBUG oslo_concurrency.lockutils [req-4fc151b7-1411-42e4-8307-c249b25fed49 req-934dea99-35fb-4d7a-b42c-236f23c1cc66 service nova] Releasing lock "refresh_cache-c309956b-1da2-4c75-a23e-2c34286a4849" {{(pid=68569) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1568.923151] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running 
periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.923526] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.923526] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.923674] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.923820] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.923959] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.924154] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.924322] env[68569]: DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1568.924477] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1569.427416] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1569.428896] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1569.428896] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1569.428896] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1569.429032] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5486fb-e2be-4033-a519-1cecf47a3fe6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.437143] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225ba191-4f14-4cf7-991b-8403922a9163 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.450611] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be8dd38-9b37-4b9c-a034-316b7e76d0d4 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.456575] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b85a8d-28d9-439b-9a51-56cf4d09a1fc {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.483952] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180501MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1569.484101] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1569.484343] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1570.509440] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance e8ba2946-f1d1-4b1f-b915-ba80ac5f087f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1570.509670] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance fad4a057-8e5d-40c9-96ec-815910eb2dcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1570.509741] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Instance c309956b-1da2-4c75-a23e-2c34286a4849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68569) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1570.509894] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1570.510044] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1570.526183] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Refreshing inventories for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1570.537667] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Updating ProviderTree inventory for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1570.537835] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Updating inventory in ProviderTree for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1570.547921] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Refreshing aggregate associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, aggregates: None {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1570.566067] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Refreshing trait associations for resource provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68569) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1570.605876] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afca9bad-a9f9-4fff-9856-9e3675905656 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.613015] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786888ed-0782-4002-8366-5399ab5fe102 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.642801] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4366fb84-9bc2-4d8c-a2fb-8b18547ad229 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.649414] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb901dfa-e6fb-4a1c-a567-e40f7698f50f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.661886] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.165354] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1571.166706] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1571.166890] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 
None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.683s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1595.568599] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1595.569021] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1596.071986] env[68569]: INFO nova.compute.manager [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Detaching volume 41b8509b-279e-4d25-8a21-56b9db8be1ad [ 1596.101688] env[68569]: INFO nova.virt.block_device [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Attempting to driver detach volume 41b8509b-279e-4d25-8a21-56b9db8be1ad from mountpoint /dev/sdb [ 1596.101924] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Volume detach. 
Driver type: vmdk {{(pid=68569) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1596.102124] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633781', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'name': 'volume-41b8509b-279e-4d25-8a21-56b9db8be1ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c309956b-1da2-4c75-a23e-2c34286a4849', 'attached_at': '', 'detached_at': '', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'serial': '41b8509b-279e-4d25-8a21-56b9db8be1ad'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1596.103012] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d7cc0a-7b2f-427b-a689-3ea99a71b237 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.125786] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c598a8e-213f-44ed-aa32-65fb00bd6a33 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.132354] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75786101-c4c8-4c65-87ac-9f4c786d79d0 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.152233] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b386e5-d8f2-4d6b-a2bb-cd481313035f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.166243] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] The volume has not been displaced from its original location: [datastore1] volume-41b8509b-279e-4d25-8a21-56b9db8be1ad/volume-41b8509b-279e-4d25-8a21-56b9db8be1ad.vmdk. No consolidation needed. 
{{(pid=68569) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1596.171274] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfiguring VM instance instance-0000007f to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1596.171563] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4c36bd0-08a6-4c5f-b296-59d22a3d5d6c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.188435] env[68569]: DEBUG oslo_vmware.api [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1596.188435] env[68569]: value = "task-3168163" [ 1596.188435] env[68569]: _type = "Task" [ 1596.188435] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.195451] env[68569]: DEBUG oslo_vmware.api [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168163, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.698316] env[68569]: DEBUG oslo_vmware.api [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168163, 'name': ReconfigVM_Task, 'duration_secs': 0.224023} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.698690] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Reconfigured VM instance instance-0000007f to detach disk 2001 {{(pid=68569) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1596.703083] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-620a9c50-7a48-4704-9f0a-05f70b92e35e {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.717010] env[68569]: DEBUG oslo_vmware.api [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1596.717010] env[68569]: value = "task-3168164" [ 1596.717010] env[68569]: _type = "Task" [ 1596.717010] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.724556] env[68569]: DEBUG oslo_vmware.api [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168164, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.226531] env[68569]: DEBUG oslo_vmware.api [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168164, 'name': ReconfigVM_Task, 'duration_secs': 0.133042} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.226834] env[68569]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-633781', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'name': 'volume-41b8509b-279e-4d25-8a21-56b9db8be1ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c309956b-1da2-4c75-a23e-2c34286a4849', 'attached_at': '', 'detached_at': '', 'volume_id': '41b8509b-279e-4d25-8a21-56b9db8be1ad', 'serial': '41b8509b-279e-4d25-8a21-56b9db8be1ad'} {{(pid=68569) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1597.765703] env[68569]: DEBUG nova.objects.instance [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'flavor' on Instance uuid c309956b-1da2-4c75-a23e-2c34286a4849 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1598.774957] env[68569]: DEBUG oslo_concurrency.lockutils [None req-98d58a93-17f2-4c74-906b-6c6a6d9923f2 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.206s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1599.872178] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1599.872560] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1599.872680] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "c309956b-1da2-4c75-a23e-2c34286a4849-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1599.872832] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1599.873051] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1599.875193] env[68569]: INFO nova.compute.manager [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Terminating instance [ 1600.379312] env[68569]: DEBUG nova.compute.manager [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1600.379543] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1600.380436] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1338ad-5224-41fa-b4b0-2887a409582b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.388323] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1600.388539] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd4e843b-82b7-44a3-a488-87eb83fd6fac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.395556] env[68569]: DEBUG oslo_vmware.api [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1600.395556] env[68569]: value = "task-3168165" [ 1600.395556] env[68569]: _type = "Task" [ 1600.395556] env[68569]: } to complete. 
{{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.403189] env[68569]: DEBUG oslo_vmware.api [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.905219] env[68569]: DEBUG oslo_vmware.api [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168165, 'name': PowerOffVM_Task, 'duration_secs': 0.176053} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.905568] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1600.905663] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1600.905896] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3962b64d-fafd-4a63-9aac-52354b1e31bd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.970968] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1600.971209] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Deleting contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1600.971428] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleting the datastore file [datastore1] c309956b-1da2-4c75-a23e-2c34286a4849 {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1600.971730] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f01b20bb-cac6-4918-b222-f0114037aab5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.978279] env[68569]: DEBUG oslo_vmware.api [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 
1600.978279] env[68569]: value = "task-3168167" [ 1600.978279] env[68569]: _type = "Task" [ 1600.978279] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.985626] env[68569]: DEBUG oslo_vmware.api [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.487528] env[68569]: DEBUG oslo_vmware.api [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140307} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.487777] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1601.487956] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Deleted contents of the VM from datastore datastore1 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1601.488152] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1601.488325] env[68569]: INFO nova.compute.manager [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1601.488567] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1601.488751] env[68569]: DEBUG nova.compute.manager [-] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1601.488842] env[68569]: DEBUG nova.network.neutron [-] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1601.907608] env[68569]: DEBUG nova.compute.manager [req-449edcd9-c912-4b49-8cb3-72f974484cfc req-a4e804e4-9ccf-4180-a3ca-2374b043ffe2 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Received event network-vif-deleted-723a385f-adb7-495c-b42d-e888e80473ad {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1601.907608] env[68569]: INFO nova.compute.manager [req-449edcd9-c912-4b49-8cb3-72f974484cfc req-a4e804e4-9ccf-4180-a3ca-2374b043ffe2 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Neutron deleted interface 723a385f-adb7-495c-b42d-e888e80473ad; detaching it from the instance and deleting it from the info cache [ 1601.907608] env[68569]: DEBUG nova.network.neutron [req-449edcd9-c912-4b49-8cb3-72f974484cfc req-a4e804e4-9ccf-4180-a3ca-2374b043ffe2 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.391073] env[68569]: DEBUG nova.network.neutron [-] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.410050] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ba379b7-6235-475a-b8f4-6da354c02690 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.419096] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac851143-9dc2-4e48-8f4b-ac657a57242d {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.446393] env[68569]: DEBUG nova.compute.manager [req-449edcd9-c912-4b49-8cb3-72f974484cfc req-a4e804e4-9ccf-4180-a3ca-2374b043ffe2 service nova] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Detach interface failed, port_id=723a385f-adb7-495c-b42d-e888e80473ad, reason: Instance c309956b-1da2-4c75-a23e-2c34286a4849 could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1602.895954] env[68569]: INFO nova.compute.manager [-] [instance: c309956b-1da2-4c75-a23e-2c34286a4849] Took 1.41 seconds to deallocate network for instance. 
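[editor's annotation] The detach and terminate sequences above are serialized per instance by oslo.concurrency named locks (the Lock "c309956b-..." acquired ... held 3.206s records). Below is a minimal sketch of that locking pattern using the public lockutils API; it is not Nova's actual ComputeManager code, and the function names and bodies are illustrative assumptions grounded only in the lock names visible in the log.

import time

from oslo_concurrency import lockutils

# Illustrative only: the compute manager guards its per-instance operations
# with a lock named after the instance UUID, so a volume detach and a
# terminate for the same instance never interleave.
INSTANCE_UUID = 'c309956b-1da2-4c75-a23e-2c34286a4849'  # from the log above

@lockutils.synchronized(INSTANCE_UUID)
def do_detach_volume(volume_id):
    # placeholder for the driver detach steps shown in the log
    print('detaching volume %s from %s' % (volume_id, INSTANCE_UUID))
    time.sleep(0.1)

def do_terminate_instance():
    # the same named lock can also be taken explicitly as a context manager
    with lockutils.lock(INSTANCE_UUID):
        print('terminating instance %s' % INSTANCE_UUID)

do_detach_volume('41b8509b-279e-4d25-8a21-56b9db8be1ad')
do_terminate_instance()

Because both entry points contend on the same lock name, the second caller simply waits, which is what the "waited 0.000s" / "held 3.206s" bookkeeping in the log is reporting.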
[ 1603.401856] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1603.402215] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1603.402388] env[68569]: DEBUG nova.objects.instance [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'resources' on Instance uuid c309956b-1da2-4c75-a23e-2c34286a4849 {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1603.955887] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd36bf6-8da5-49bf-88e5-110a2c9c29f7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.963106] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964f087f-0d18-4a28-a86f-a2e7bb3398af {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.992173] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda6c183-6239-456a-b4a9-401885243fd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.998931] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ff07c6-a006-4837-9432-0408fbf25b2c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.011279] env[68569]: DEBUG nova.compute.provider_tree [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.514895] env[68569]: DEBUG nova.scheduler.client.report [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1605.020543] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1605.042102] env[68569]: INFO nova.scheduler.client.report [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleted allocations for instance c309956b-1da2-4c75-a23e-2c34286a4849 [ 1605.551321] env[68569]: DEBUG oslo_concurrency.lockutils [None req-7ec5db1b-6ac1-46ad-8df4-4d9a35485761 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "c309956b-1da2-4c75-a23e-2c34286a4849" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.679s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1611.002648] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1611.002977] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1611.003148] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1611.003374] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1611.003559] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1611.005759] env[68569]: INFO nova.compute.manager 
[None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Terminating instance [ 1611.510298] env[68569]: DEBUG nova.compute.manager [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1611.510569] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1611.511473] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d70d338-3519-42f1-b5d5-ecb1c54a16ac {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.518936] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1611.519164] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-960c5b25-4895-4448-977a-b2f0048fb145 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.524538] env[68569]: DEBUG oslo_vmware.api [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1611.524538] env[68569]: value = "task-3168169" [ 1611.524538] env[68569]: _type = "Task" [ 1611.524538] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.531985] env[68569]: DEBUG oslo_vmware.api [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168169, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.034499] env[68569]: DEBUG oslo_vmware.api [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168169, 'name': PowerOffVM_Task, 'duration_secs': 0.19444} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.034871] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1612.034908] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1612.035156] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9b341f5-b1b2-4f41-a3f4-cfd0e5221a05 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.093241] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1612.093505] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1612.093694] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleting the datastore file [datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1612.093952] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30fca1e4-d743-413d-bac7-e4df2c494f64 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.100329] env[68569]: DEBUG oslo_vmware.api [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1612.100329] env[68569]: value = "task-3168171" [ 1612.100329] env[68569]: _type = "Task" [ 1612.100329] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.107719] env[68569]: DEBUG oslo_vmware.api [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168171, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.612484] env[68569]: DEBUG oslo_vmware.api [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175594} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.612737] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1612.612917] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1612.613109] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1612.613293] env[68569]: INFO nova.compute.manager [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1612.613522] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1612.613712] env[68569]: DEBUG nova.compute.manager [-] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1612.613804] env[68569]: DEBUG nova.network.neutron [-] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1612.824594] env[68569]: DEBUG nova.compute.manager [req-594bdc1e-b0c4-4fc9-b478-98d52a3af930 req-b509b8c9-5940-4ead-aaf3-8b008dd708d1 service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Received event network-vif-deleted-d2756c2c-59f0-4d39-a132-7170724fdf05 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1612.824594] env[68569]: INFO nova.compute.manager [req-594bdc1e-b0c4-4fc9-b478-98d52a3af930 req-b509b8c9-5940-4ead-aaf3-8b008dd708d1 service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Neutron deleted interface d2756c2c-59f0-4d39-a132-7170724fdf05; detaching it from the instance and deleting it from the info cache [ 1612.824783] env[68569]: DEBUG nova.network.neutron [req-594bdc1e-b0c4-4fc9-b478-98d52a3af930 req-b509b8c9-5940-4ead-aaf3-8b008dd708d1 service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.309521] env[68569]: DEBUG nova.network.neutron [-] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.327408] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0ab20dc-d09e-4f64-bd04-4f404d9ad78b {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.337416] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b018ba6c-99ab-4fe2-813b-55b53a15ebcb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.362469] env[68569]: DEBUG nova.compute.manager [req-594bdc1e-b0c4-4fc9-b478-98d52a3af930 req-b509b8c9-5940-4ead-aaf3-8b008dd708d1 service nova] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Detach interface failed, port_id=d2756c2c-59f0-4d39-a132-7170724fdf05, reason: Instance fad4a057-8e5d-40c9-96ec-815910eb2dcd could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1613.812056] env[68569]: INFO nova.compute.manager [-] [instance: fad4a057-8e5d-40c9-96ec-815910eb2dcd] Took 1.20 seconds to deallocate network for instance. 
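The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is plain oslo.vmware usage: each vSphere call goes through session.invoke_api() and returns a task object that the driver polls with wait_for_task(), which is what produces the "progress is 0%" / "completed successfully" lines. A minimal standalone sketch of that call pattern, assuming reachable vCenter credentials and already-resolved VM and datacenter managed-object references; the host, user, password and moref values are placeholders, not values taken from this log:

    from oslo_vmware import api, vim_util

    # Placeholder credentials -- not values from this deployment.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Nova resolves the VM moref from the instance UUID via the SearchIndex /
    # PropertyCollector calls visible in the log; here it is assumed known.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    dc_ref = vim_util.get_moref('datacenter-3', 'Datacenter')

    # Power off, then unregister the VM (UnregisterVM is not a task).
    power_off_task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(power_off_task)   # polls, like _poll_task above
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Remove the instance directory from the datastore.
    file_manager = session.vim.service_content.fileManager
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] fad4a057-8e5d-40c9-96ec-815910eb2dcd',
        datacenter=dc_ref)
    session.wait_for_task(delete_task)

Inside Nova these calls are wrapped by vm_util.power_off_instance() and ds_util.file_delete(), the functions named in the log paths above; the sketch only shows the underlying oslo.vmware mechanics.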
[ 1614.317770] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1614.318091] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1614.318303] env[68569]: DEBUG nova.objects.instance [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'resources' on Instance uuid fad4a057-8e5d-40c9-96ec-815910eb2dcd {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1614.860092] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067c81bc-9c43-4d3f-a8b4-3ff3c859932f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.867477] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ce0add-b579-4602-bb45-f5c75014c52c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.896539] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd12bd7-d086-4877-b7b0-aeae7cfc9fd7 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.903053] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bad2a21-0f6f-4345-a2de-2810fbee1f24 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.915547] env[68569]: DEBUG nova.compute.provider_tree [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.418611] env[68569]: DEBUG nova.scheduler.client.report [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1615.925359] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1615.950069] env[68569]: INFO nova.scheduler.client.report [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleted allocations for instance fad4a057-8e5d-40c9-96ec-815910eb2dcd [ 1616.456937] env[68569]: DEBUG oslo_concurrency.lockutils [None req-076c3ac2-4019-49e8-8821-b776af425e95 tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "fad4a057-8e5d-40c9-96ec-815910eb2dcd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.454s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1617.637718] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1617.638098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1617.638098] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1617.638303] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1617.638568] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1617.641014] env[68569]: INFO nova.compute.manager 
[None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Terminating instance [ 1618.145576] env[68569]: DEBUG nova.compute.manager [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Start destroying the instance on the hypervisor. {{(pid=68569) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1618.145852] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Destroying instance {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1618.146721] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de23f7e8-5b38-4f98-a5f5-cf0dccdf6e86 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.154520] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powering off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1618.154739] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb1de72c-5b8a-4e70-a023-e667a12a14cd {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.160554] env[68569]: DEBUG oslo_vmware.api [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1618.160554] env[68569]: value = "task-3168172" [ 1618.160554] env[68569]: _type = "Task" [ 1618.160554] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.167714] env[68569]: DEBUG oslo_vmware.api [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.670215] env[68569]: DEBUG oslo_vmware.api [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168172, 'name': PowerOffVM_Task, 'duration_secs': 0.194706} completed successfully. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.670583] env[68569]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Powered off the VM {{(pid=68569) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1618.670583] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Unregistering the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1618.670822] env[68569]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfa7d15a-fae3-4694-acb8-ca78980711f6 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.729348] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Unregistered the VM {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1618.729618] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Deleting contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1618.729855] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleting the datastore file [datastore2] e8ba2946-f1d1-4b1f-b915-ba80ac5f087f {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.730135] env[68569]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2949f118-3f35-4f6d-b22d-23a0b21de293 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.736417] env[68569]: DEBUG oslo_vmware.api [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for the task: (returnval){ [ 1618.736417] env[68569]: value = "task-3168174" [ 1618.736417] env[68569]: _type = "Task" [ 1618.736417] env[68569]: } to complete. {{(pid=68569) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.743326] env[68569]: DEBUG oslo_vmware.api [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168174, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.246232] env[68569]: DEBUG oslo_vmware.api [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Task: {'id': task-3168174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136967} completed successfully. {{(pid=68569) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.246487] env[68569]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleted the datastore file {{(pid=68569) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1619.246676] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Deleted contents of the VM from datastore datastore2 {{(pid=68569) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1619.246841] env[68569]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Instance destroyed {{(pid=68569) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1619.247012] env[68569]: INFO nova.compute.manager [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1619.247258] env[68569]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68569) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1619.247433] env[68569]: DEBUG nova.compute.manager [-] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Deallocating network for instance {{(pid=68569) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1619.247525] env[68569]: DEBUG nova.network.neutron [-] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] deallocate_for_instance() {{(pid=68569) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1619.484318] env[68569]: DEBUG nova.compute.manager [req-05063275-1aa1-49da-986a-adac2d5bf117 req-ac0cdcc0-b2b7-4838-9622-738fb526ff61 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Received event network-vif-deleted-20033b81-8427-4557-b3d1-160c08d94511 {{(pid=68569) external_instance_event /opt/stack/nova/nova/compute/manager.py:11744}} [ 1619.484496] env[68569]: INFO nova.compute.manager [req-05063275-1aa1-49da-986a-adac2d5bf117 req-ac0cdcc0-b2b7-4838-9622-738fb526ff61 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Neutron deleted interface 20033b81-8427-4557-b3d1-160c08d94511; detaching it from the instance and deleting it from the info cache [ 1619.484715] env[68569]: DEBUG nova.network.neutron [req-05063275-1aa1-49da-986a-adac2d5bf117 req-ac0cdcc0-b2b7-4838-9622-738fb526ff61 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.966572] env[68569]: DEBUG nova.network.neutron [-] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Updating instance_info_cache with network_info: [] {{(pid=68569) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.987826] env[68569]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14cc88ce-dd26-4553-8491-5f2707e21f15 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.997227] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0089801f-3445-4564-abf7-e551b374045a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.020747] env[68569]: DEBUG nova.compute.manager [req-05063275-1aa1-49da-986a-adac2d5bf117 req-ac0cdcc0-b2b7-4838-9622-738fb526ff61 service nova] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Detach interface failed, port_id=20033b81-8427-4557-b3d1-160c08d94511, reason: Instance e8ba2946-f1d1-4b1f-b915-ba80ac5f087f could not be found. {{(pid=68569) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11578}} [ 1620.468633] env[68569]: INFO nova.compute.manager [-] [instance: e8ba2946-f1d1-4b1f-b915-ba80ac5f087f] Took 1.22 seconds to deallocate network for instance. 
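deallocate_for_instance() asks Neutron to remove the ports bound to the instance; in both tear-downs above the port had already been deleted on the Neutron side (hence the network-vif-deleted event and the "could not be found" detach message), so only the instance info cache is emptied. Roughly the same cleanup, sketched with openstacksdk rather than Nova's internal Neutron API wrapper; the clouds.yaml entry name is an assumption and this is not the code path Nova itself runs:

    import openstack

    # Assumes a 'devstack' entry in clouds.yaml with admin credentials.
    conn = openstack.connect(cloud='devstack')
    instance_uuid = 'e8ba2946-f1d1-4b1f-b915-ba80ac5f087f'

    # Ports created for an instance carry its UUID as device_id.
    for port in conn.network.ports(device_id=instance_uuid):
        # ignore_missing covers the race seen in the log, where Neutron
        # (or the tempest test) has already removed the port.
        conn.network.delete_port(port, ignore_missing=True)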
[ 1620.974853] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1620.975258] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1620.975365] env[68569]: DEBUG nova.objects.instance [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lazy-loading 'resources' on Instance uuid e8ba2946-f1d1-4b1f-b915-ba80ac5f087f {{(pid=68569) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1621.508306] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42349a5-a40a-490b-9404-df12cb1369db {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.516220] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a33e25a-7254-47e4-ae3a-55d97ae856f2 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.544859] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b5526a-cd1e-4f15-aa14-4deea35718ea {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.551684] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e38d12c-5964-4140-99cf-66fe1d17d1bb {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.565492] env[68569]: DEBUG nova.compute.provider_tree [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.068590] env[68569]: DEBUG nova.scheduler.client.report [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1622.573391] env[68569]: DEBUG oslo_concurrency.lockutils [None 
req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1622.594411] env[68569]: INFO nova.scheduler.client.report [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Deleted allocations for instance e8ba2946-f1d1-4b1f-b915-ba80ac5f087f [ 1623.102366] env[68569]: DEBUG oslo_concurrency.lockutils [None req-f1b7e4f9-c96c-4f9e-9775-43657e0e795d tempest-ServerRescueNegativeTestJSON-657930027 tempest-ServerRescueNegativeTestJSON-657930027-project-member] Lock "e8ba2946-f1d1-4b1f-b915-ba80ac5f087f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.464s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.169034] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.169419] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.674230] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.674439] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.674588] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.674726] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.674867] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.675028] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.675182] env[68569]: 
DEBUG nova.compute.manager [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68569) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11160}} [ 1631.675334] env[68569]: DEBUG oslo_service.periodic_task [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68569) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.178492] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1632.178874] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1632.178912] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1632.179076] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68569) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1632.180037] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91260cb-45b5-4b26-95b6-d6b5fe451714 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.188162] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b00629-b22e-46ae-ad74-67105aafecc1 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.201731] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2338b4d7-644e-47bc-a5ef-885b2c817a91 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.207571] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fa4e59-cd67-4512-a613-ffcaed781a3a {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.236872] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180504MB free_disk=129GB free_vcpus=48 pci_devices=None {{(pid=68569) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1632.237056] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1632.237253] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1633.257609] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1633.257867] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68569) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1633.271158] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae839af3-0472-4d24-ace0-3da7578ce65f {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.278477] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641a8962-fc3e-42c6-8da6-d75e4b0f6665 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.306988] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc749fc2-3f18-40c4-bc92-140bd53f5d9c {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.313303] env[68569]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce90af1-afcf-4c2e-8336-7f860229eec5 {{(pid=68569) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.325482] env[68569]: DEBUG nova.compute.provider_tree [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed in ProviderTree for provider: a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 {{(pid=68569) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1633.828431] env[68569]: DEBUG nova.scheduler.client.report [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Inventory has not changed for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68569) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1634.334856] env[68569]: DEBUG nova.compute.resource_tracker [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68569) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1634.335185] env[68569]: DEBUG oslo_concurrency.lockutils [None req-910d62ce-39e5-4b8c-a757-704b284d16d8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.098s {{(pid=68569) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
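The inventory dict reported to Placement above determines schedulable capacity: for each resource class the usable amount is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Applied to the logged data this gives 192 VCPU, 196078 MB of RAM and 400 GB of disk. A small worked check of that arithmetic; the dict is copied from the log for provider a29ec0c8-0dd6-4f8e-97c1-2cb80c3ac3d6, and the formula reflects how Placement treats these fields:

    # Effective capacity per resource class from the logged inventory.
    # Placement computes capacity as (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 129,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, max per allocation={inv['max_unit']}")

    # VCPU: capacity=192, max per allocation=16
    # MEMORY_MB: capacity=196078, max per allocation=65530
    # DISK_GB: capacity=400, max per allocation=129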